Update app.py

app.py CHANGED
@@ -1,5 +1,5 @@
 """
-LabOps Log Analyzer Dashboard with CSV file upload, PDF generation, and
+LabOps Log Analyzer Dashboard with CSV file upload, PDF generation, and email alerts
 """
 import gradio as gr
 import pandas as pd
@@ -8,27 +8,16 @@ import logging
 import plotly.express as px
 from sklearn.ensemble import IsolationForest
 from concurrent.futures import ThreadPoolExecutor
-from simple_salesforce import Salesforce
 import os
-import json
 import io
+import smtplib
+from email.mime.text import MIMEText
+from email.mime.multipart import MIMEMultipart
+from email.mime.application import MIMEApplication
 
 # Configure logging
 logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
 
-# Salesforce configuration
-try:
-    sf = Salesforce(
-        username='multi-devicelabopsdashboard@sathkrutha.com',
-        password='Team@1234',
-        security_token=os.getenv('SF_SECURITY_TOKEN', ''),
-        domain='login'
-    )
-    logging.info("Salesforce connection established")
-except Exception as e:
-    logging.error(f"Failed to connect to Salesforce: {str(e)}")
-    sf = None
-
 # Try to import reportlab
 try:
     from reportlab.lib.pagesizes import letter
@@ -40,229 +29,6 @@ except ImportError:
     logging.warning("reportlab module not found. PDF generation disabled.")
     reportlab_available = False
 
-# Fetch valid picklist values from Salesforce
-def get_picklist_values(field_name):
-    if sf is None:
-        return []
-    try:
-        obj_desc = sf.SmartLog__c.describe()
-        for field in obj_desc['fields']:
-            if field['name'] == field_name:
-                return [value['value'] for value in field['picklistValues'] if value['active']]
-        return []
-    except Exception as e:
-        logging.error(f"Failed to fetch picklist values for {field_name}: {str(e)}")
-        return []
-
-# Cache picklist values at startup
-status_values = get_picklist_values('Status__c') or ["Active", "Inactive", "Pending"]
-log_type_values = get_picklist_values('Log_Type__c') or ["Smart Log", "Cell Analysis", "UV Verification"]
-logging.info(f"Valid Status__c values: {status_values}")
-logging.info(f"Valid Log_Type__c values: {log_type_values}")
-
-# Map invalid picklist values to valid ones
-picklist_mapping = {
-    'Status__c': {
-        'normal': 'Active',
-        'error': 'Inactive',
-        'warning': 'Pending',
-        'ok': 'Active',
-        'failed': 'Inactive'
-    },
-    'Log_Type__c': {
-        'maint': 'Smart Log',
-        'error': 'Cell Analysis',
-        'ops': 'UV Verification',
-        'maintenance': 'Smart Log',
-        'cell': 'Cell Analysis',
-        'uv': 'UV Verification',
-        'weight log': 'Smart Log'
-    }
-}
-
-# Fetch folder ID for "LabOps Reports"
-def get_folder_id(folder_name):
-    if sf is None:
-        return None
-    try:
-        query = f"SELECT Id FROM Folder WHERE Name = '{folder_name}' AND Type = 'Report'"
-        result = sf.query(query)
-        if result['totalSize'] > 0:
-            folder_id = result['records'][0]['Id']
-            logging.info(f"Found folder ID for '{folder_name}': {folder_id}")
-            return folder_id
-        else:
-            logging.error(f"Folder '{folder_name}' not found in Salesforce.")
-            return None
-    except Exception as e:
-        logging.error(f"Failed to fetch folder ID for '{folder_name}': {str(e)}")
-        return None
-
-# Cache the folder ID at startup
-LABOPS_REPORTS_FOLDER_ID = get_folder_id('LabOps Reports')
-
-# Salesforce report creation (runs in backend, result not displayed)
-def create_salesforce_reports(df):
-    if sf is None:
-        logging.info("Salesforce connection not available for report creation.")
-        return
-    if not LABOPS_REPORTS_FOLDER_ID:
-        logging.info("Cannot create reports: 'LabOps Reports' folder not found in Salesforce.")
-        return
-
-    try:
-        usage_report_metadata = {
-            "reportMetadata": {
-                "name": f"SmartLog_Usage_Report_{datetime.now().strftime('%Y%m%d_%H%M%S')}",
-                "developerName": f"SmartLog_Usage_Report_{datetime.now().strftime('%Y%m%d_%H%M%S')}",
-                "reportType": {"type": "CustomEntity", "value": "SmartLog__c"},
-                "reportFormat": "TABULAR",
-                "reportBooleanFilter": None,
-                "reportFilters": [],
-                "detailColumns": [
-                    "SmartLog__c.Device_Id__c",
-                    "SmartLog__c.Usage_Hours__c"
-                ],
-                "folderId": LABOPS_REPORTS_FOLDER_ID
-            }
-        }
-        logging.info(f"Creating Usage Report with metadata: {json.dumps(usage_report_metadata, indent=2)}")
-        usage_result = sf.restful('analytics/reports', method='POST', json=usage_report_metadata)
-        usage_report_id = usage_result['id']
-        logging.info(f"Usage Report created: {usage_report_id}")
-
-        amc_report_metadata = {
-            "reportMetadata": {
-                "name": f"SmartLog_AMC_Reminders_{datetime.now().strftime('%Y%m%d_%H%M%S')}",
-                "developerName": f"SmartLog_AMC_Reminders_{datetime.now().strftime('%Y%m%d_%H%M%S')}",
-                "reportType": {"type": "CustomEntity", "value": "SmartLog__c"},
-                "reportFormat": "TABULAR",
-                "reportBooleanFilter": None,
-                "reportFilters": [],
-                "detailColumns": [
-                    "SmartLog__c.Device_Id__c",
-                    "SmartLog__c.AMC_Date__c"
-                ],
-                "folderId": LABOPS_REPORTS_FOLDER_ID
-            }
-        }
-        logging.info(f"Creating AMC Reminders Report with metadata: {json.dumps(amc_report_metadata, indent=2)}")
-        amc_result = sf.restful('analytics/reports', method='POST', json=amc_report_metadata)
-        amc_report_id = amc_result['id']
-        logging.info(f"AMC Reminders Report created: {amc_report_id}")
-    except Exception as e:
-        logging.error(f"Failed to create Salesforce reports: {str(e)}")
-
-# Save results to Salesforce SmartLog__c (runs in backend, result not displayed)
-def save_to_salesforce(df, reminders_df, summary, anomalies, amc_reminders, insights):
-    if sf is None:
-        logging.info("Salesforce connection not available for saving records.")
-        return
-
-    reminder_records = []
-    current_date = datetime.now()
-    next_30_days = current_date + timedelta(days=30)
-
-    if not reminders_df.empty:
-        logging.info(f"Processing {len(reminders_df)} AMC reminder records for saving to Salesforce")
-        for _, row in reminders_df.iterrows():
-            status = str(row['status'])
-            log_type = str(row['log_type'])
-
-            if status not in status_values:
-                status = picklist_mapping['Status__c'].get(status.lower(), status_values[0] if status_values else None)
-                if status is None:
-                    logging.warning(f"Skipping reminder record with invalid Status__c: {row['status']}")
-                    continue
-
-            if log_type not in log_type_values:
-                log_type = picklist_mapping['Log_Type__c'].get(log_type.lower(), log_type_values[0] if log_type_values else None)
-                if log_type is None:
-                    logging.warning(f"Skipping reminder record with invalid Log_Type__c: {row['log_type']}")
-                    continue
-
-            amc_date_str = None
-            if pd.notna(row['amc_date']):
-                try:
-                    amc_date = pd.to_datetime(row['amc_date'], errors='coerce')
-                    if pd.isna(amc_date):
-                        logging.error(f"Failed to parse AMC Date for Device ID {row['device_id']}: {row['amc_date']}")
-                    else:
-                        amc_date_str = amc_date.strftime('%Y-%m-%d')
-                        amc_date_dt = datetime.strptime(amc_date_str, '%Y-%m-%d')
-                        if status == "Active" and current_date.date() <= amc_date_dt.date() <= next_30_days.date():
-                            logging.info(f"AMC Reminder record saved: Device ID {row['device_id']}, AMC Date {amc_date_str}")
-                except Exception as e:
-                    logging.error(f"Failed to parse AMC Date for Device ID {row['device_id']}: {str(e)}")
-                    amc_date_str = None
-
-            record = {
-                'Device_Id__c': str(row['device_id'])[:50],
-                'Log_Type__c': log_type,
-                'Status__c': status,
-                'Timestamp__c': row['timestamp'].isoformat() if pd.notna(row['timestamp']) else None,
-                'Usage_Hours__c': float(row['usage_hours']) if pd.notna(row['usage_hours']) else 0.0,
-                'Downtime__c': float(row['downtime']) if pd.notna(row['downtime']) else 0.0,
-                'AMC_Date__c': amc_date_str
-            }
-            reminder_records.append(record)
-
-    other_records = []
-    reminder_device_ids = set(reminders_df['device_id']) if not reminders_df.empty else set()
-    for _, row in df.iterrows():
-        if row['device_id'] in reminder_device_ids:
-            continue
-
-        status = str(row['status'])
-        log_type = str(row['log_type'])
-
-        if status not in status_values:
-            status = picklist_mapping['Status__c'].get(status.lower(), status_values[0] if status_values else None)
-            if status is None:
-                logging.warning(f"Skipping record with invalid Status__c: {row['status']}")
-                continue
-
-        if log_type not in log_type_values:
-            log_type = picklist_mapping['Log_Type__c'].get(log_type.lower(), log_type_values[0] if log_type_values else None)
-            if log_type is None:
-                logging.warning(f"Skipping record with invalid Log_Type__c: {row['log_type']}")
-                continue
-
-        amc_date_str = None
-        if pd.notna(row['amc_date']):
-            try:
-                amc_date = pd.to_datetime(row['amc_date'], errors='coerce')
-                if pd.isna(amc_date):
-                    logging.error(f"Failed to parse AMC Date for Device ID {row['device_id']}: {row['amc_date']}")
-                else:
-                    amc_date_str = amc_date.strftime('%Y-%m-%d')
-                    amc_date_dt = datetime.strptime(amc_date_str, '%Y-%m-%d')
-                    if status == "Active" and current_date.date() <= amc_date_dt.date() <= next_30_days.date():
-                        logging.info(f"Record qualifies for AMC Reminders: Device ID {row['device_id']}, AMC Date {amc_date_str}")
-            except Exception as e:
-                logging.error(f"Failed to parse AMC Date for Device ID {row['device_id']}: {str(e)}")
-                amc_date_str = None
-
-        record = {
-            'Device_Id__c': str(row['device_id'])[:50],
-            'Log_Type__c': log_type,
-            'Status__c': status,
-            'Timestamp__c': row['timestamp'].isoformat() if pd.notna(row['timestamp']) else None,
-            'Usage_Hours__c': float(row['usage_hours']) if pd.notna(row['usage_hours']) else 0.0,
-            'Downtime__c': float(row['downtime']) if pd.notna(row['downtime']) else 0.0,
-            'AMC_Date__c': amc_date_str
-        }
-        other_records.append(record)
-
-    all_records = reminder_records + other_records
-
-    try:
-        if all_records:
-            sf.bulk.SmartLog__c.insert(all_records)
-            logging.info(f"Saved {len(all_records)} total records to Salesforce (including {len(reminder_records)} AMC reminders)")
-    except Exception as e:
-        logging.error(f"Failed to save to Salesforce: {str(e)}")
-
 # Generate summary and insights without Hugging Face model
 def generate_summary_and_insights(df):
     try:
@@ -325,6 +91,7 @@ def check_amc_reminders(df, current_date):
 def create_usage_chart(agg_data):
     try:
         usage_data = agg_data['usage_per_device']
+        logging.info(f"Usage data for chart: {usage_data.to_string()}")
         if usage_data.empty:
             logging.warning("Usage data is empty.")
             return None
@@ -384,6 +151,7 @@ def create_usage_chart(agg_data):
 def create_downtime_chart(agg_data):
     try:
         downtime_data = agg_data['downtime_per_device']
+        logging.info(f"Downtime data for chart: {downtime_data.to_string()}")
         if downtime_data.empty:
             logging.warning("Downtime data is empty.")
             return None
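Review note: the two `logging.info(... .to_string())` additions serialize the full aggregated frame on every analysis run, which can flood the log for large uploads. A lighter sketch, assuming the same variable names, that logs only a bounded preview:

    # Sketch: log shape at INFO and a small sample at DEBUG instead of the whole frame.
    logging.info(f"Usage data for chart: shape={usage_data.shape}")
    logging.debug(f"Usage data head:\n{usage_data.head(5).to_string()}")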
@@ -439,6 +207,164 @@ def create_downtime_chart(agg_data):
         logging.error(f"Failed to create downtime chart: {str(e)}")
         return None
 
+# Create Daily Log Trends chart
+def create_daily_log_trends_chart(df):
+    try:
+        if df.empty or 'timestamp' not in df.columns:
+            logging.warning("DataFrame is empty or missing 'timestamp' column for Daily Log Trends.")
+            return None
+
+        # Group by date to count logs per day
+        df['date'] = df['timestamp'].dt.date
+        log_counts = df.groupby('date').size().reset_index(name='log_count')
+
+        fig = px.line(
+            log_counts,
+            x='date',
+            y='log_count',
+            title="Daily Log Trends",
+            labels={"date": "Date", "log_count": "Number of Logs"}
+        )
+        fig.update_traces(
+            line_color='#4ECDC4',
+            line_width=2
+        )
+        fig.update_layout(
+            title_font=dict(size=18, family="Arial", color="#333333"),
+            font=dict(family="Arial", size=12, color="#333333"),
+            plot_bgcolor="white",
+            paper_bgcolor="white",
+            margin=dict(l=30, r=30, t=50, b=30),
+            xaxis=dict(
+                title="Date",
+                showgrid=False,
+                title_font=dict(size=14),
+                tickfont=dict(size=12)
+            ),
+            yaxis=dict(
+                title="Number of Logs",
+                gridcolor="#E5E5E5",
+                gridwidth=1,
+                title_font=dict(size=14),
+                tickfont=dict(size=12)
+            )
+        )
+        return fig
+    except Exception as e:
+        logging.error(f"Failed to create Daily Log Trends chart: {str(e)}")
+        return None
+
+# Create Weekly Uptime Percentage chart
+def create_weekly_uptime_chart(df):
+    try:
+        if df.empty or 'timestamp' not in df.columns or 'downtime' not in df.columns:
+            logging.warning("DataFrame is empty or missing required columns for Weekly Uptime Percentage.")
+            return None
+
+        # Group by week
+        df['week'] = df['timestamp'].dt.isocalendar().week
+        df['year'] = df['timestamp'].dt.year
+        weekly_data = df.groupby(['year', 'week']).agg({
+            'downtime': 'sum'
+        }).reset_index()
+
+        # Calculate uptime percentage (assuming 24*7 = 168 hours per week)
+        total_hours_per_week = 168
+        weekly_data['uptime_percentage'] = ((total_hours_per_week - weekly_data['downtime']) / total_hours_per_week) * 100
+        weekly_data['week_label'] = weekly_data.apply(lambda x: f"{x['year']}-W{x['week']:02d}", axis=1)
+
+        fig = px.bar(
+            weekly_data,
+            x='week_label',
+            y='uptime_percentage',
+            title="Weekly Uptime Percentage",
+            labels={"week_label": "Week", "uptime_percentage": "Uptime Percentage (%)"},
+            color='uptime_percentage',
+            color_continuous_scale=['#FF0000', '#96CEB4']
+        )
+        fig.update_traces(
+            marker_line_color='#333333',
+            marker_line_width=1.5,
+            opacity=0.9
+        )
+        fig.update_layout(
+            title_font=dict(size=18, family="Arial", color="#333333"),
+            font=dict(family="Arial", size=12, color="#333333"),
+            plot_bgcolor="white",
+            paper_bgcolor="white",
+            margin=dict(l=30, r=30, t=50, b=30),
+            xaxis=dict(
+                title="Week",
+                showgrid=False,
+                tickangle=45,
+                title_font=dict(size=14),
+                tickfont=dict(size=12)
+            ),
+            yaxis=dict(
+                title="Uptime Percentage (%)",
+                gridcolor="#E5E5E5",
+                gridwidth=1,
+                title_font=dict(size=14),
+                tickfont=dict(size=12)
+            ),
+            showlegend=False,
+            bargap=0.2
+        )
+        return fig
+    except Exception as e:
+        logging.error(f"Failed to create Weekly Uptime Percentage chart: {str(e)}")
+        return None
+
+# Create Anomaly Alerts chart
+def create_anomaly_alerts_chart(df, anomalies_df):
+    try:
+        if df.empty or anomalies_df.empty:
+            logging.warning("DataFrame or anomalies DataFrame is empty for Anomaly Alerts chart.")
+            return None
+
+        # Prepare data for scatter plot
+        df['is_anomaly'] = df.index.isin(anomalies_df.index)
+        df['color'] = df['is_anomaly'].map({True: 'red', False: 'blue'})
+
+        fig = px.scatter(
+            df,
+            x='usage_hours',
+            y='downtime',
+            color='color',
+            title="Anomaly Alerts (Red = Anomaly)",
+            labels={"usage_hours": "Usage Hours", "downtime": "Downtime (Hours)"},
+            color_discrete_map={'blue': '#4ECDC4', 'red': '#FF0000'}
+        )
+        fig.update_traces(
+            marker=dict(size=8, line=dict(width=1, color='#333333')),
+            opacity=0.7
+        )
+        fig.update_layout(
+            title_font=dict(size=18, family="Arial", color="#333333"),
+            font=dict(family="Arial", size=12, color="#333333"),
+            plot_bgcolor="white",
+            paper_bgcolor="white",
+            margin=dict(l=30, r=30, t=50, b=30),
+            xaxis=dict(
+                title="Usage Hours",
+                showgrid=False,
+                title_font=dict(size=14),
+                tickfont=dict(size=12)
+            ),
+            yaxis=dict(
+                title="Downtime (Hours)",
+                gridcolor="#E5E5E5",
+                gridwidth=1,
+                title_font=dict(size=14),
+                tickfont=dict(size=12)
+            ),
+            showlegend=False
+        )
+        return fig
+    except Exception as e:
+        logging.error(f"Failed to create Anomaly Alerts chart: {str(e)}")
+        return None
+
 # Generate Device Cards HTML
 def generate_device_cards(df):
     try:
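Review note: `create_weekly_uptime_chart` divides each week's summed downtime by a fixed 168-hour budget. With a single device that is fine, but when several devices report downtime in the same week the sum can exceed 168 and the percentage goes negative: three devices each down 60 hours give (168 - 180) / 168 * 100 ≈ -7.1%. A per-device normalization sketch, assuming the frame carries the device_id column used elsewhere in this file:

    # Sketch: scale the weekly hour budget by the number of devices reporting.
    weekly = df.groupby(['year', 'week']).agg(
        downtime=('downtime', 'sum'),
        devices=('device_id', 'nunique')
    ).reset_index()
    budget = weekly['devices'] * 168
    weekly['uptime_percentage'] = (budget - weekly['downtime']) / budget * 100

Also note that `create_anomaly_alerts_chart` writes `is_anomaly` and `color` columns into the caller's frame; starting from df = df.copy() would avoid the side effect.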
@@ -599,6 +525,65 @@ def generate_pdf_content(summary, preview, anomalies, amc_reminders, insights, d
         logging.error(f"Failed to generate PDF: {str(e)}", exc_info=True)
         return None
 
+# Send email alert with analysis summary and PDF attachment
+def send_email_alert(summary, anomalies, amc_reminders, pdf_path, recipient_email="recipient@example.com"):
+    try:
+        # Email configuration
+        sender_email = "your_email@gmail.com"  # Replace with your email
+        sender_password = "your_app_password"  # Replace with your app-specific password
+        smtp_server = "smtp.gmail.com"
+        smtp_port = 587
+
+        # Create email message
+        subject = "LabOps Log Analyzer Report - Analysis Completed"
+        body = f"""
+Dear Recipient,
+
+The LabOps Log Analyzer has completed its analysis. Below are the key findings:
+
+**Summary:**
+{summary}
+
+**Anomalies Detected:**
+{anomalies}
+
+**AMC Reminders:**
+{amc_reminders}
+
+The full report is attached as a PDF for your review.
+
+Regards,
+LabOps Team
+"""
+
+        msg = MIMEMultipart()
+        msg['From'] = sender_email
+        msg['To'] = recipient_email
+        msg['Subject'] = subject
+        msg.attach(MIMEText(body, 'plain'))
+
+        # Attach the PDF if it exists
+        if pdf_path and os.path.exists(pdf_path):
+            with open(pdf_path, 'rb') as f:
+                pdf_attachment = MIMEApplication(f.read(), _subtype="pdf")
+            pdf_attachment.add_header(
+                'Content-Disposition', 'attachment', filename=os.path.basename(pdf_path)
+            )
+            msg.attach(pdf_attachment)
+            logging.info(f"Attached PDF to email: {pdf_path}")
+        else:
+            logging.warning("No PDF file to attach to email.")
+
+        # Send the email
+        with smtplib.SMTP(smtp_server, smtp_port) as server:
+            server.starttls()
+            server.login(sender_email, sender_password)
+            server.sendmail(sender_email, recipient_email, msg.as_string())
+
+        logging.info(f"Email alert sent to {recipient_email}")
+    except Exception as e:
+        logging.error(f"Failed to send email alert: {str(e)}")
+
 # Main Gradio function
 async def process_logs(file_obj, lab_site_filter, equipment_type_filter, date_range, month_filter, last_modified_state):
     try:
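Review note: the sender address and app password are hardcoded placeholders, much like the Salesforce credentials this change removes. A sketch of reading them from the environment instead; the variable names SMTP_SENDER and SMTP_APP_PASSWORD are illustrative, not part of this change:

    # Sketch: pull SMTP credentials from environment variables (e.g., Space secrets).
    sender_email = os.getenv("SMTP_SENDER", "")
    sender_password = os.getenv("SMTP_APP_PASSWORD", "")
    if not sender_email or not sender_password:
        logging.warning("SMTP credentials not configured; skipping email alert.")
        return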
@@ -692,12 +677,14 @@ async def process_logs(file_obj, lab_site_filter, equipment_type_filter, date_ra
         }
 
         # Run tasks concurrently
-        with ThreadPoolExecutor(max_workers=
+        with ThreadPoolExecutor(max_workers=3) as executor:
             future_summary_insights = executor.submit(generate_summary_and_insights, filtered_df)
             future_anomalies = executor.submit(detect_anomalies, filtered_df)
             future_amc = executor.submit(check_amc_reminders, filtered_df, datetime.now())
             future_usage_chart = executor.submit(create_usage_chart, agg_data)
             future_downtime_chart = executor.submit(create_downtime_chart, agg_data)
+            future_daily_log_chart = executor.submit(create_daily_log_trends_chart, filtered_df)
+            future_weekly_uptime_chart = executor.submit(create_weekly_uptime_chart, filtered_df)
             future_device_cards = executor.submit(generate_device_cards, filtered_df)
 
         summary, insights = future_summary_insights.result()
@@ -709,11 +696,13 @@
         amc_reminders = f"AMC Reminders\n{amc_reminders}"
         usage_chart = future_usage_chart.result()
         downtime_chart = future_downtime_chart.result()
-        daily_log_chart =
-        weekly_uptime_chart =
-        anomaly_alerts_chart = None
+        daily_log_chart = future_daily_log_chart.result()
+        weekly_uptime_chart = future_weekly_uptime_chart.result()
         device_cards = future_device_cards.result()
 
+        # Generate Anomaly Alerts chart after anomalies are detected
+        anomaly_alerts_chart = create_anomaly_alerts_chart(filtered_df, anomalies_df)
+
         # Generate the log preview as an HTML table
         preview_html = """
         <style>
@@ -802,6 +791,9 @@ async def process_logs(file_obj, lab_site_filter, equipment_type_filter, date_ra
         else:
             status_msg = "Analysis completed, but some data is missing for PDF generation."
 
+        # Send email alert
+        send_email_alert(summary, anomalies, amc_reminders, pdf_file)
+
         elapsed_time = (datetime.now() - start_time).total_seconds()
         logging.info(f"Processing completed in {elapsed_time:.2f} seconds")
         if elapsed_time > 10:
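Review note: `send_email_alert` is called synchronously inside the async `process_logs`, so the SMTP round trip blocks the event loop for the duration of the send. One way to keep the handler responsive, assuming the surrounding code is otherwise unchanged:

    # Sketch: offload the blocking SMTP call to a worker thread
    # (requires "import asyncio" at the top of the file).
    await asyncio.get_running_loop().run_in_executor(
        None, send_email_alert, summary, anomalies, amc_reminders, pdf_file
    )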
@@ -868,7 +860,7 @@ try:
     .dashboard-section ul {margin: 2px 0; padding-left: 20px;}
     """) as iface:
         gr.Markdown("<h1>LabOps Log Analyzer Dashboard</h1>")
-        gr.Markdown("Upload a CSV file to analyze. Click 'Analyze' to refresh the dashboard with the latest data. A PDF report will be generated automatically.")
+        gr.Markdown("Upload a CSV file to analyze. Click 'Analyze' to refresh the dashboard with the latest data. A PDF report will be generated automatically, and an email alert will be sent.")
 
         last_modified_state = gr.State(value=None)
         summary_state = gr.State()