# Importing Streamlit for building the web-based interactive application framework
import streamlit as st


# Function to display model performance metrics
def display_text_model_performance():
    # Basic Markdown table without additional styling
    html_content = """
This table presents the performance of various classification models for different companies.

Metric | Logistic Regression | Naive Bayes | Random Forest |
---|---|---|---|
Accuracy | 0.6516 | 0.6672 | 0.9900 |
Class 0 Precision | 0.62 | 0.65 | 0.99 |
Class 0 Recall | 0.82 | 0.77 | 1.00 |
Class 0 F1-Score | 0.71 | 0.70 | 0.99 |
Class 0 Support | 2395 | 2395 | 2395 |
Class 1 Precision | 0.72 | 0.70 | 1.00 |
Class 1 Recall | 0.48 | 0.56 | 0.98 |
Class 1 F1-Score | 0.57 | 0.62 | 0.99 |
Class 1 Support | 2304 | 2304 | 2304 |
Macro Avg Precision | 0.67 | 0.67 | 0.99 |
Macro Avg Recall | 0.65 | 0.67 | 0.99 |
Macro Avg F1-Score | 0.64 | 0.66 | 0.99 |
Weighted Avg Precision | 0.67 | 0.67 | 0.99 |
Weighted Avg Recall | 0.65 | 0.67 | 0.99 |
Weighted Avg F1-Score | 0.64 | 0.66 | 0.99 |
Total Support | 4699 | 4699 | 4699 |
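"""
    # Assumed completion (the closing quotes and the rendering call are not part
    # of the original snippet): a minimal sketch that renders the table in the
    # Streamlit app. st.markdown interprets the pipe-delimited rows above as a
    # Markdown table.
    st.markdown(html_content)


# Hypothetical usage: Streamlit executes the script top to bottom, so calling the
# function at module level displays the table when the app runs.
display_text_model_performance()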