import gradio as gr
import pandas as pd
from datetime import datetime, timedelta
import logging
from sklearn.ensemble import IsolationForest
from concurrent.futures import ThreadPoolExecutor
import os
import io
import re
import time
import asyncio
from simple_salesforce import Salesforce

# Configure logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

# Check required libraries (pip name -> (importable module name, minimum version));
# note the importable name differs from the pip name for scikit-learn
required_libs = {
    "pandas": ("pandas", "pandas>=1.0.0"),
    "plotly": ("plotly", "plotly>=5.0.0"),
    "reportlab": ("reportlab", "reportlab>=3.0.0"),
    "scikit-learn": ("sklearn", "scikit-learn>=0.24.0")
}
missing_libs = []
for pip_name, (module_name, version) in required_libs.items():
    try:
        __import__(module_name)
        logging.info(f"{pip_name} module successfully imported")
    except ImportError:
        logging.warning(f"{pip_name} module not found. Install {version} for full functionality.")
        missing_libs.append(pip_name)

# Try to import plotly
try:
    import plotly.express as px
    import plotly.graph_objects as go
    plotly_available = True
    logging.info("plotly module successfully imported")
except ImportError:
    logging.warning("plotly module not found. Chart generation disabled.")
    plotly_available = False

# Try to import reportlab (the imports live inside the try block so a missing
# install degrades gracefully instead of crashing at import time)
try:
    from reportlab.lib.pagesizes import letter
    from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer, Table, TableStyle
    from reportlab.lib.styles import getSampleStyleSheet
    from reportlab.lib import colors
    reportlab_available = True
    logging.info("reportlab module successfully imported")
except ImportError:
    logging.warning("reportlab module not found. PDF generation disabled.")
    reportlab_available = False

# Salesforce configuration (credentials are read from the environment rather
# than hardcoded in source)
try:
    sf = Salesforce(
        username=os.getenv('SF_USERNAME', ''),
        password=os.getenv('SF_PASSWORD', ''),
        security_token=os.getenv('SF_SECURITY_TOKEN', ''),
        domain='login'
    )
    logging.info("Salesforce connection established")
except Exception as e:
    logging.error(f"Failed to connect to Salesforce: {str(e)}")
    sf = None

# Cache picklist values at startup
def get_picklist_values(field_name):
    if sf is None:
        return []
    try:
        obj_desc = sf.SmartLog__c.describe()
        for field in obj_desc['fields']:
            if field['name'] == field_name:
                return [value['value'] for value in field['picklistValues'] if value['active']]
        return []
    except Exception as e:
        logging.error(f"Failed to fetch picklist values for {field_name}: {str(e)}")
        return []

status_values = get_picklist_values('Status__c') or ["Active", "Inactive", "Pending"]
log_type_values = get_picklist_values('Log_Type__c') or ["Smart Log", "Cell Analysis", "UV Verification"]
logging.info(f"Valid Status__c values: {status_values}")
logging.info(f"Valid Log_Type__c values: {log_type_values}")

# Map free-form CSV values onto valid picklist values
picklist_mapping = {
    'Status__c': {
        'normal': 'Active',
        'error': 'Inactive',
        'warning': 'Pending',
        'ok': 'Active',
        'failed': 'Inactive'
    },
    'Log_Type__c': {
        'maint': 'Smart Log',
        'error': 'Cell Analysis',
        'ops': 'UV Verification',
        'maintenance': 'Smart Log',
        'cell': 'Cell Analysis',
        'uv': 'UV Verification',
        'weight log': 'Smart Log'
    }
}

# Cache folder ID for Salesforce reports
def get_folder_id(folder_name):
    if sf is None:
        return None
    try:
        query = f"SELECT Id FROM Folder WHERE Name = '{folder_name}' AND Type = 'Report'"
        result = sf.query(query)
        if result['totalSize'] > 0:
            folder_id = result['records'][0]['Id']
            logging.info(f"Found folder ID for '{folder_name}': {folder_id}")
            return folder_id
        logging.error(f"Folder '{folder_name}' not found in Salesforce.")
        return None
    except Exception as e:
        logging.error(f"Failed to fetch folder ID for '{folder_name}': {str(e)}")
        return None
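# Illustrative sketch (not called anywhere): how a raw CSV value is normalized
# against the mapping and cached picklists before upload. The input value
# "error" is hypothetical.
def _demo_picklist_mapping():
    raw_status = "error"
    mapped = picklist_mapping['Status__c'].get(raw_status, status_values[0] if status_values else 'Active')
    print(f"{raw_status!r} -> {mapped!r}")  # expected: 'error' -> 'Inactive'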
LABOPS_REPORTS_FOLDER_ID = get_folder_id('LabOps Reports')

# Salesforce report creation (df is accepted for future filtering; the report
# definitions below are currently static)
def create_salesforce_reports(df):
    if sf is None or not LABOPS_REPORTS_FOLDER_ID:
        logging.error("Cannot create Salesforce reports: No connection or folder ID")
        return
    try:
        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
        reports = [
            {
                "reportMetadata": {
                    "name": f"SmartLog_Usage_Report_{timestamp}",
                    "developerName": f"SmartLog_Usage_Report_{timestamp}",
                    "reportType": {"type": "CustomEntity", "value": "SmartLog__c"},
                    "reportFormat": "TABULAR",
                    "reportBooleanFilter": None,
                    "reportFilters": [],
                    "detailColumns": ["SmartLog__c.Device_Id__c", "SmartLog__c.Usage_Hours__c"],
                    "folderId": LABOPS_REPORTS_FOLDER_ID
                }
            },
            {
                "reportMetadata": {
                    "name": f"SmartLog_AMC_Reminders_{timestamp}",
                    "developerName": f"SmartLog_AMC_Reminders_{timestamp}",
                    "reportType": {"type": "CustomEntity", "value": "SmartLog__c"},
                    "reportFormat": "TABULAR",
                    "reportBooleanFilter": None,
                    "reportFilters": [],
                    "detailColumns": ["SmartLog__c.Device_Id__c", "SmartLog__c.AMC_Date__c"],
                    "folderId": LABOPS_REPORTS_FOLDER_ID
                }
            }
        ]
        for report in reports:
            sf.restful('analytics/reports', method='POST', json=report)
        logging.info("Salesforce reports created successfully")
    except Exception as e:
        logging.error(f"Failed to create Salesforce reports: {str(e)}")

# Save to Salesforce (reminders_df is accepted for future use; AMC reminders
# are currently logged per-row below)
def save_to_salesforce(df, reminders_df):
    if sf is None:
        logging.error("No Salesforce connection available")
        return
    try:
        logging.info("Starting Salesforce save operation")
        current_date = datetime.now()
        next_30_days = current_date + timedelta(days=30)
        records = []
        logging.info(f"Processing {len(df)} records for Salesforce")
        for idx, row in df.iterrows():
            status = str(row['status']).lower()
            log_type = str(row['log_type']).lower()
            status_mapped = picklist_mapping['Status__c'].get(status, status_values[0] if status_values else 'Active')
            log_type_mapped = picklist_mapping['Log_Type__c'].get(log_type, log_type_values[0] if log_type_values else 'Smart Log')
            if not status_mapped or not log_type_mapped:
                logging.warning(f"Skipping record {idx}: Invalid status ({status}) or log_type ({log_type})")
                continue
            amc_date_str = None
            if pd.notna(row['amc_date']):
                try:
                    amc_date = pd.to_datetime(row['amc_date']).strftime('%Y-%m-%d')
                    amc_date_str = amc_date
                    amc_date_dt = datetime.strptime(amc_date, '%Y-%m-%d')
                    if status_mapped == "Active" and current_date.date() <= amc_date_dt.date() <= next_30_days.date():
                        logging.info(f"AMC Reminder for Device ID {row['device_id']}: {amc_date}")
                except Exception as e:
                    logging.warning(f"Invalid AMC date for Device ID {row['device_id']}: {str(e)}")
            record = {
                'Device_Id__c': str(row['device_id'])[:50],
                'Log_Type__c': log_type_mapped,
                'Status__c': status_mapped,
                'Timestamp__c': row['timestamp'].isoformat() if pd.notna(row['timestamp']) else None,
                'Usage_Hours__c': float(row['usage_hours']) if pd.notna(row['usage_hours']) else 0.0,
                'Downtime__c': float(row['downtime']) if pd.notna(row['downtime']) else 0.0,
                'AMC_Date__c': amc_date_str
            }
            records.append(record)
        if records:
            batch_size = 100
            for i in range(0, len(records), batch_size):
                batch = records[i:i + batch_size]
                try:
                    result = sf.bulk.SmartLog__c.insert(batch)
                    logging.info(f"Saved {len(batch)} records to Salesforce in batch {i//batch_size + 1}")
                    for res in result:
                        if not res['success']:
                            logging.error(f"Failed to save record: {res['errors']}")
                except Exception as e:
                    logging.error(f"Failed to save batch {i//batch_size + 1}: {str(e)}")
        else:
            logging.warning("No records to save to Salesforce")
    except Exception as e:
        logging.error(f"Failed to save to Salesforce: {str(e)}")

# Summarize logs
def summarize_logs(df):
    try:
        total_devices = df["device_id"].nunique()
        total_usage = df["usage_hours"].sum() if "usage_hours" in df.columns else 0
        return f"{total_devices} devices processed with {total_usage:.2f} total usage hours."
    except Exception as e:
        logging.error(f"Summary generation failed: {str(e)}")
        return "Failed to generate summary."

# Anomaly detection
def detect_anomalies(df):
    try:
        if "usage_hours" not in df.columns or "downtime" not in df.columns:
            return "Anomaly detection requires 'usage_hours' and 'downtime' columns.", pd.DataFrame()
        features = df[["usage_hours", "downtime"]].fillna(0)
        # Fit on a sample when the dataset is large, but score every row so the
        # prediction column aligns with the full DataFrame
        train = features.sample(n=50, random_state=42) if len(features) > 50 else features
        iso_forest = IsolationForest(contamination=0.1, random_state=42)
        iso_forest.fit(train)
        df["anomaly"] = iso_forest.predict(features)
        anomalies = df[df["anomaly"] == -1][["device_id", "usage_hours", "downtime", "timestamp"]]
        if anomalies.empty:
            return "No anomalies detected.", anomalies
        lines = [
            f"- Device ID: {row['device_id']}, Usage: {row['usage_hours']}, Downtime: {row['downtime']}, Timestamp: {row['timestamp']}"
            for _, row in anomalies.head(5).iterrows()
        ]
        return "\n".join(lines), anomalies
    except Exception as e:
        logging.error(f"Anomaly detection failed: {str(e)}")
        return f"Anomaly detection failed: {str(e)}", pd.DataFrame()

# AMC reminders
def check_amc_reminders(df, current_date):
    try:
        if "device_id" not in df.columns or "amc_date" not in df.columns:
            return "AMC reminders require 'device_id' and 'amc_date' columns.", pd.DataFrame()
        df["amc_date"] = pd.to_datetime(df["amc_date"], errors='coerce')
        current_date = pd.to_datetime(current_date)
        df["days_to_amc"] = (df["amc_date"] - current_date).dt.days
        reminders = df[(df["days_to_amc"] >= 0) & (df["days_to_amc"] <= 30)][
            ["device_id", "log_type", "status", "timestamp", "usage_hours", "downtime", "amc_date"]
        ]
        if reminders.empty:
            return "No AMC reminders due within the next 30 days.", reminders
        lines = [f"- Device ID: {row['device_id']}, AMC Date: {row['amc_date']}" for _, row in reminders.head(5).iterrows()]
        return "\n".join(lines), reminders
    except Exception as e:
        logging.error(f"AMC reminder generation failed: {str(e)}")
        return f"AMC reminder generation failed: {str(e)}", pd.DataFrame()

# Dashboard insights
def generate_dashboard_insights(df):
    try:
        total_devices = df["device_id"].nunique()
        avg_usage = df["usage_hours"].mean() if "usage_hours" in df.columns else 0
        return f"{total_devices} devices with average usage of {avg_usage:.2f} hours."
    except Exception as e:
        logging.error(f"Dashboard insights generation failed: {str(e)}")
        return "Failed to generate insights."
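# Illustrative sketch with synthetic data (not called anywhere): exercises
# detect_anomalies() and check_amc_reminders() outside the Gradio flow. The
# device IDs and values below are made up; DEV-3 is deliberately an outlier.
def _demo_analysis_helpers():
    demo = pd.DataFrame({
        "device_id": ["DEV-1", "DEV-2", "DEV-3"],
        "log_type": ["Smart Log"] * 3,
        "status": ["Active", "Active", "Inactive"],
        "timestamp": pd.to_datetime(["2024-06-01", "2024-06-02", "2024-06-03"]),
        "usage_hours": [8.0, 7.5, 120.0],
        "downtime": [0.5, 0.2, 40.0],
        "amc_date": ["2024-06-20", "2024-12-01", None],
    })
    anomaly_text, _ = detect_anomalies(demo.copy())
    reminder_text, _ = check_amc_reminders(demo.copy(), datetime(2024, 6, 10))
    print(anomaly_text)
    print(reminder_text)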
# Placeholder chart for empty data or missing plotly
def create_placeholder_chart(title):
    if not plotly_available:
        logging.warning(f"Cannot create chart '{title}': plotly not available")
        return None
    try:
        fig = go.Figure()
        fig.add_annotation(
            text="No data available for this chart",
            xref="paper", yref="paper",
            x=0.5, y=0.5, showarrow=False,
            font=dict(size=16)
        )
        fig.update_layout(title=title, margin=dict(l=20, r=20, t=40, b=20))
        return fig
    except Exception as e:
        logging.error(f"Failed to create placeholder chart '{title}': {str(e)}")
        return None

# Create usage chart
def create_usage_chart(df):
    if not plotly_available:
        logging.warning("Cannot create usage chart: plotly not available")
        return None
    try:
        if df.empty or "usage_hours" not in df.columns or "device_id" not in df.columns:
            logging.warning("Insufficient data for usage chart")
            return create_placeholder_chart("Usage Hours per Device")
        usage_data = df.groupby("device_id")["usage_hours"].sum().reset_index()
        if len(usage_data) > 5:
            usage_data = usage_data.nlargest(5, "usage_hours")
        fig = px.bar(
            usage_data,
            x="device_id",
            y="usage_hours",
            title="Usage Hours per Device",
            labels={"device_id": "Device ID", "usage_hours": "Usage Hours"}
        )
        fig.update_layout(title_font_size=16, margin=dict(l=20, r=20, t=40, b=20))
        return fig
    except Exception as e:
        logging.error(f"Failed to create usage chart: {str(e)}")
        return create_placeholder_chart("Usage Hours per Device")

# Create downtime chart
def create_downtime_chart(df):
    if not plotly_available:
        logging.warning("Cannot create downtime chart: plotly not available")
        return None
    try:
        if df.empty or "downtime" not in df.columns or "device_id" not in df.columns:
            logging.warning("Insufficient data for downtime chart")
            return create_placeholder_chart("Downtime per Device")
        downtime_data = df.groupby("device_id")["downtime"].sum().reset_index()
        if len(downtime_data) > 5:
            downtime_data = downtime_data.nlargest(5, "downtime")
        fig = px.bar(
            downtime_data,
            x="device_id",
            y="downtime",
            title="Downtime per Device",
            labels={"device_id": "Device ID", "downtime": "Downtime (Hours)"}
        )
        fig.update_layout(title_font_size=16, margin=dict(l=20, r=20, t=40, b=20))
        return fig
    except Exception as e:
        logging.error(f"Failed to create downtime chart: {str(e)}")
        return create_placeholder_chart("Downtime per Device")

# Create daily log trends chart
def create_daily_log_trends_chart(df):
    if not plotly_available:
        logging.warning("Cannot create daily log trends chart: plotly not available")
        return None
    try:
        if df.empty or "timestamp" not in df.columns:
            logging.warning("Insufficient data for daily log trends chart")
            return create_placeholder_chart("Daily Log Trends")
        df['date'] = pd.to_datetime(df['timestamp'], errors='coerce').dt.date
        daily_logs = df.groupby('date').size().reset_index(name='log_count')
        if daily_logs.empty:
            return create_placeholder_chart("Daily Log Trends")
        fig = px.line(
            daily_logs,
            x='date',
            y='log_count',
            title="Daily Log Trends",
            labels={"date": "Date", "log_count": "Number of Logs"}
        )
        fig.update_layout(title_font_size=16, margin=dict(l=20, r=20, t=40, b=20))
        return fig
    except Exception as e:
        logging.error(f"Failed to create daily log trends chart: {str(e)}")
        return create_placeholder_chart("Daily Log Trends")

# Create weekly uptime chart
def create_weekly_uptime_chart(df):
    if not plotly_available:
        logging.warning("Cannot create weekly uptime chart: plotly not available")
        return None
    try:
        if df.empty or "timestamp" not in df.columns or "usage_hours" not in df.columns or "downtime" not in df.columns:
            logging.warning("Insufficient data for weekly uptime chart")
            return create_placeholder_chart("Weekly Uptime Percentage")
        df['week'] = pd.to_datetime(df['timestamp'], errors='coerce').dt.isocalendar().week
        df['year'] = pd.to_datetime(df['timestamp'], errors='coerce').dt.year
        weekly_data = df.groupby(['year', 'week']).agg({
            'usage_hours': 'sum',
            'downtime': 'sum'
        }).reset_index()
        if weekly_data.empty:
            return create_placeholder_chart("Weekly Uptime Percentage")
        weekly_data['uptime_percent'] = (weekly_data['usage_hours'] / (weekly_data['usage_hours'] + weekly_data['downtime'])) * 100
        weekly_data['year_week'] = weekly_data['year'].astype(str) + '-W' + weekly_data['week'].astype(str)
        fig = px.bar(
            weekly_data,
            x='year_week',
            y='uptime_percent',
            title="Weekly Uptime Percentage",
            labels={"year_week": "Year-Week", "uptime_percent": "Uptime %"}
        )
        fig.update_layout(title_font_size=16, margin=dict(l=20, r=20, t=40, b=20))
        return fig
    except Exception as e:
        logging.error(f"Failed to create weekly uptime chart: {str(e)}")
        return create_placeholder_chart("Weekly Uptime Percentage")

# Create anomaly alerts chart
def create_anomaly_alerts_chart(anomalies_df):
    if not plotly_available:
        logging.warning("Cannot create anomaly alerts chart: plotly not available")
        return None
    try:
        if anomalies_df is None or anomalies_df.empty or "timestamp" not in anomalies_df.columns:
            logging.warning("Insufficient data for anomaly alerts chart")
            return create_placeholder_chart("Anomaly Alerts Over Time")
        anomalies_df['date'] = pd.to_datetime(anomalies_df['timestamp'], errors='coerce').dt.date
        anomaly_counts = anomalies_df.groupby('date').size().reset_index(name='anomaly_count')
        if anomaly_counts.empty:
            return create_placeholder_chart("Anomaly Alerts Over Time")
        fig = px.scatter(
            anomaly_counts,
            x='date',
            y='anomaly_count',
            title="Anomaly Alerts Over Time",
            labels={"date": "Date", "anomaly_count": "Number of Anomalies"}
        )
        fig.update_layout(title_font_size=16, margin=dict(l=20, r=20, t=40, b=20))
        return fig
    except Exception as e:
        logging.error(f"Failed to create anomaly alerts chart: {str(e)}")
        return create_placeholder_chart("Anomaly Alerts Over Time")

# Generate device cards (the card markup was reconstructed as simple inline-styled
# divs; the original tags were lost in extraction)
def generate_device_cards(df):
    try:
        if df.empty:
            return '<div>No devices available to display.</div>'
        device_stats = df.groupby('device_id').agg({
            'status': 'last',
            'timestamp': 'max',
        }).reset_index()
        device_stats['count'] = df.groupby('device_id').size().reindex(device_stats['device_id']).values
        device_stats['health'] = device_stats['status'].map({
            'Active': 'Healthy',
            'Inactive': 'Unhealthy',
            'Pending': 'Warning'
        }).fillna('Unknown')
        cards_html = '<div style="display: flex; flex-wrap: wrap; gap: 10px;">'
        for _, row in device_stats.iterrows():
            health_color = {'Healthy': 'green', 'Unhealthy': 'red', 'Warning': 'orange', 'Unknown': 'gray'}.get(row['health'], 'gray')
            timestamp_str = str(row['timestamp']) if pd.notna(row['timestamp']) else 'Unknown'
            cards_html += f"""
            <div style="border: 1px solid #e0e0e0; border-radius: 5px; padding: 10px; min-width: 200px;">
                <p><b>Device:</b> {row['device_id']}</p>
                <p><b>Health:</b> <span style="color: {health_color};">{row['health']}</span></p>
                <p><b>Usage Count:</b> {row['count']}</p>
                <p><b>Last Log:</b> {timestamp_str}</p>
            </div>
            """
        cards_html += '</div>'
        return cards_html
    except Exception as e:
        logging.error(f"Failed to generate device cards: {str(e)}")
        return f'<div>Error generating device cards: {str(e)}</div>'
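# Hedged sketch (assumption: the optional 'kaleido' package is installed):
# plotly figures can be rasterized to PNG and embedded as reportlab flowables,
# instead of the text placeholder used by generate_pdf_content() below.
# Not wired into the report; shown for illustration only.
def _chart_to_flowable(fig, width=400, height=300):
    from reportlab.platypus import Image
    png_bytes = fig.to_image(format="png")  # requires kaleido
    return Image(io.BytesIO(png_bytes), width=width, height=height)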
# Generate PDF content
def generate_pdf_content(summary, preview_html, anomalies, amc_reminders, insights, device_cards_html,
                         daily_log_chart, weekly_uptime_chart, anomaly_alerts_chart, downtime_chart):
    if not reportlab_available:
        logging.warning("PDF generation disabled: reportlab not available")
        return None
    try:
        pdf_path = f"status_report_{datetime.now().strftime('%Y%m%d_%H%M%S')}.pdf"
        doc = SimpleDocTemplate(pdf_path, pagesize=letter)
        styles = getSampleStyleSheet()
        story = []
        logging.info("Starting PDF generation with summary: %s", summary)

        def safe_paragraph(text, style):
            # reportlab's Paragraph understands a small HTML subset, so map
            # newlines to <br/> tags
            cleaned_text = str(text).replace('\n', '<br/>') if text else "No data available"
            return Paragraph(cleaned_text, style)

        story.append(Paragraph("LabOps Status Report", styles['Title']))
        story.append(Paragraph(f"Generated on {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}", styles['Normal']))
        story.append(Spacer(1, 12))
        story.append(Paragraph("Summary Report", styles['Heading2']))
        story.append(safe_paragraph(summary, styles['Normal']))
        story.append(Spacer(1, 12))
        story.append(Paragraph("Log Preview", styles['Heading2']))
        # Parse the preview table once; read_html raises ValueError when no table is found
        preview_df = pd.DataFrame()
        if preview_html:
            try:
                tables = pd.read_html(io.StringIO(preview_html))
                if tables:
                    preview_df = tables[0]
            except ValueError:
                logging.warning("Could not parse preview HTML into a table")
        logging.info("Preview DF shape: %s", preview_df.shape if not preview_df.empty else "Empty")
        if not preview_df.empty:
            data = [preview_df.columns.tolist()] + preview_df.head(5).values.tolist()
            table = Table(data)
            table.setStyle(TableStyle([
                ('BACKGROUND', (0, 0), (-1, 0), colors.grey),
                ('TEXTCOLOR', (0, 0), (-1, 0), colors.whitesmoke),
                ('ALIGN', (0, 0), (-1, -1), 'CENTER'),
                ('FONTNAME', (0, 0), (-1, 0), 'Helvetica-Bold'),
                ('FONTSIZE', (0, 0), (-1, 0), 12),
                ('BOTTOMPADDING', (0, 0), (-1, 0), 12),
                ('BACKGROUND', (0, 1), (-1, -1), colors.beige),
                ('TEXTCOLOR', (0, 1), (-1, -1), colors.black),
                ('FONTNAME', (0, 1), (-1, -1), 'Helvetica'),
                ('FONTSIZE', (0, 1), (-1, -1), 10),
                ('GRID', (0, 0), (-1, -1), 1, colors.black)
            ]))
            story.append(table)
        else:
            story.append(safe_paragraph("No preview available.", styles['Normal']))
        story.append(Spacer(1, 12))
        story.append(Paragraph("Device Cards", styles['Heading2']))
        # Strip HTML tags so the card markup renders as plain PDF text
        device_cards_text = re.sub(r'<[^>]+>', ' ', device_cards_html).strip() if device_cards_html else "No device cards available"
        story.append(safe_paragraph(device_cards_text, styles['Normal']))
        story.append(Spacer(1, 12))
        story.append(Paragraph("Anomaly Detection", styles['Heading2']))
        story.append(safe_paragraph(anomalies, styles['Normal']))
        story.append(Spacer(1, 12))
        story.append(Paragraph("AMC Reminders", styles['Heading2']))
        story.append(safe_paragraph(amc_reminders, styles['Normal']))
        story.append(Spacer(1, 12))
        story.append(Paragraph("Dashboard Insights", styles['Heading2']))
        story.append(safe_paragraph(insights, styles['Normal']))
        story.append(Spacer(1, 12))
        story.append(Paragraph("Charts", styles['Heading2']))
        if not plotly_available:
            story.append(safe_paragraph("Charts unavailable: plotly not installed.", styles['Normal']))
        else:
            chart_count = sum(1 for chart in [daily_log_chart, weekly_uptime_chart, anomaly_alerts_chart, downtime_chart] if chart is not None)
            story.append(safe_paragraph(f"[Chart placeholders - {chart_count} charts included, see dashboard for visuals]", styles['Normal']))
        doc.build(story)
        logging.info(f"PDF generated successfully at {pdf_path}")
        return pdf_path
    except Exception as e:
        logging.error(f"Failed to generate PDF: {str(e)}. Check input data or reportlab configuration. Input summary: {str(summary)[:100]}...")
        return None
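# Illustrative sketch: writes a small synthetic CSV matching the schema that
# process_logs() below expects. lab_site and equipment_type are optional and
# only drive the dropdown filters. All names and values are made up; the
# timestamps are generated relative to "now" so they land inside the default
# date window.
def _write_demo_csv(path="demo_logs.csv"):
    now = datetime.now()
    pd.DataFrame({
        "device_id": ["DEV-1", "DEV-2"],
        "log_type": ["Smart Log", "Cell Analysis"],
        "status": ["Active", "Pending"],
        "timestamp": [(now - timedelta(days=3)).isoformat(), (now - timedelta(days=1)).isoformat()],
        "usage_hours": [8.0, 6.5],
        "downtime": [0.5, 1.0],
        "amc_date": [(now + timedelta(days=10)).strftime('%Y-%m-%d'), (now + timedelta(days=90)).strftime('%Y-%m-%d')],
        "lab_site": ["Site A", "Site B"],
        "equipment_type": ["Centrifuge", "Microscope"],
    }).to_csv(path, index=False)
    return path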
# Main processing function
async def process_logs(file_obj, lab_site_filter, equipment_type_filter, date_range, cached_df_state, last_modified_state):
    start_time = time.time()
    try:
        if not file_obj:
            return ("No file uploaded.", "<div>No data available.</div>", None,
                    '<div>No device cards available.</div>', None, None, None, None,
                    "No anomalies detected.", "No AMC reminders.", "No insights generated.",
                    None, cached_df_state, last_modified_state)
        file_path = file_obj.name
        current_modified_time = os.path.getmtime(file_path)
        # Read the file only if it's new or modified
        if cached_df_state is None or current_modified_time != last_modified_state:
            logging.info(f"Processing new or modified file: {file_path}")
            if not file_path.endswith(".csv"):
                return ("Please upload a CSV file.", "<div>Invalid file format.</div>", None,
                        '<div>No device cards available.</div>', None, None, None, None,
                        "", "", "", None, cached_df_state, last_modified_state)
            required_columns = ["device_id", "log_type", "status", "timestamp", "usage_hours", "downtime", "amc_date"]
            dtypes = {
                "device_id": "string",
                "log_type": "string",
                "status": "string",
                "usage_hours": "float32",
                "downtime": "float32",
                "amc_date": "string"
            }
            df = pd.read_csv(file_path, dtype=dtypes)
            missing_columns = [col for col in required_columns if col not in df.columns]
            if missing_columns:
                return (f"Missing columns: {missing_columns}", "<div>Missing required columns.</div>", None,
                        '<div>No device cards available.</div>', None, None, None, None,
                        "", "", "", None, cached_df_state, last_modified_state)
            df["timestamp"] = pd.to_datetime(df["timestamp"], errors='coerce')
            df["amc_date"] = pd.to_datetime(df["amc_date"], errors='coerce')
            if df["timestamp"].dt.tz is None:
                df["timestamp"] = df["timestamp"].dt.tz_localize('UTC').dt.tz_convert('Asia/Kolkata')
            if df.empty:
                return ("No data available.", "<div>No data available.</div>", None,
                        '<div>No device cards available.</div>', None, None, None, None,
                        "", "", "", None, df, current_modified_time)
        else:
            df = cached_df_state

        # Apply filters
        filtered_df = df.copy()
        if lab_site_filter and lab_site_filter != 'All' and 'lab_site' in filtered_df.columns:
            filtered_df = filtered_df[filtered_df['lab_site'] == lab_site_filter]
        if equipment_type_filter and equipment_type_filter != 'All' and 'equipment_type' in filtered_df.columns:
            filtered_df = filtered_df[filtered_df['equipment_type'] == equipment_type_filter]
        if date_range is not None:
            if isinstance(date_range, (int, float)):
                days = int(date_range)
                date_range = [days, days]
                logging.info(f"Converted single value {days} to range {date_range}")
            if len(date_range) != 2 or not all(isinstance(x, (int, float)) for x in date_range) or date_range[0] > date_range[1]:
                logging.error(f"Invalid date range format: {date_range}. Expected [start, end] with start <= end (e.g., [-45, -28]).")
                return ("Invalid date range. Please use [start, end] where start <= end (e.g., [-45, -28]) or a single integer (e.g., -30).",
                        "<div>Error processing data.</div>", None, '<div>Error processing data.</div>',
                        None, None, None, None, "", "", "", None, df, current_modified_time)
            days_start, days_end = date_range
            # today is tz-aware, so start_date and end_date inherit Asia/Kolkata
            today = pd.to_datetime(datetime.now()).tz_localize('Asia/Kolkata')
            start_date = today + pd.Timedelta(days=days_start)
            end_date = today + pd.Timedelta(days=days_end) + pd.Timedelta(days=1) - pd.Timedelta(seconds=1)
            logging.info(f"Date range filter applied: start_date={start_date}, end_date={end_date}")
            logging.info(f"Before date filter: {len(filtered_df)} rows")
            filtered_df = filtered_df[(filtered_df['timestamp'] >= start_date) & (filtered_df['timestamp'] <= end_date)]
            logging.info(f"After date filter: {len(filtered_df)} rows")
        if filtered_df.empty:
            return ("No data after applying filters.", "<div>No data after filters.</div>", None,
                    '<div>No device cards available.</div>', None, None, None, None,
                    "", "", "", None, df, current_modified_time)

        # Generate table for preview
        preview_df = filtered_df[['device_id', 'log_type', 'status', 'timestamp', 'usage_hours', 'downtime', 'amc_date']].head(5)
        preview_html = preview_df.to_html(index=False, classes='table table-striped', border=0)

        # Run critical tasks concurrently
        with ThreadPoolExecutor(max_workers=2) as executor:
            future_anomalies = executor.submit(detect_anomalies, filtered_df)
            future_amc = executor.submit(check_amc_reminders, filtered_df, datetime.now())
            summary = f"Step 1: Summary Report\n{summarize_logs(filtered_df)}"
            anomalies, anomalies_df = future_anomalies.result()
            anomalies = f"Anomaly Detection\n{anomalies}"
            amc_reminders, reminders_df = future_amc.result()
            amc_reminders = f"AMC Reminders\n{amc_reminders}"
        insights = f"Dashboard Insights\n{generate_dashboard_insights(filtered_df)}"

        # Generate charts sequentially
        usage_chart = create_usage_chart(filtered_df)
        downtime_chart = create_downtime_chart(filtered_df)
        daily_log_chart = create_daily_log_trends_chart(filtered_df)
        weekly_uptime_chart = create_weekly_uptime_chart(filtered_df)
        anomaly_alerts_chart = create_anomaly_alerts_chart(anomalies_df)
        device_cards = generate_device_cards(filtered_df)

        # Save to Salesforce after all other processing
        save_to_salesforce(filtered_df, reminders_df)
        create_salesforce_reports(filtered_df)

        elapsed_time = time.time() - start_time
        logging.info(f"Processing completed in {elapsed_time:.2f} seconds")
        if elapsed_time > 3:
            logging.warning(f"Processing time exceeded 3 seconds: {elapsed_time:.2f} seconds")
        return (summary, preview_html, usage_chart, device_cards, daily_log_chart, weekly_uptime_chart,
                anomaly_alerts_chart, downtime_chart, anomalies, amc_reminders, insights,
                None, df, current_modified_time)
    except Exception as e:
        logging.error(f"Failed to process file: {str(e)}")
        return (f"Error: {str(e)}", "<div>Error processing data.</div>", None,
                '<div>Error processing data.</div>', None, None, None, None,
                "", "", "", None, cached_df_state, last_modified_state)
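# Illustrative sketch (not called anywhere): drives process_logs() outside
# Gradio using asyncio. _FakeFile is a hypothetical stand-in for the upload
# object Gradio passes; only its .name attribute is used by process_logs().
class _FakeFile:
    def __init__(self, name):
        self.name = name

def _demo_process_logs():
    csv_path = _write_demo_csv()  # synthetic CSV defined above
    results = asyncio.run(process_logs(_FakeFile(csv_path), 'All', 'All', [-365, 0], None, None))
    print(results[0])  # summary text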
# Generate PDF separately
async def generate_pdf(summary, preview_html, usage_chart, device_cards, daily_log_chart, weekly_uptime_chart,
                       anomaly_alerts_chart, downtime_chart, anomalies, amc_reminders, insights):
    try:
        logging.info("Starting PDF generation process")
        pdf_file = generate_pdf_content(summary, preview_html, anomalies, amc_reminders, insights, device_cards,
                                        daily_log_chart, weekly_uptime_chart, anomaly_alerts_chart, downtime_chart)
        # The click handler binds two outputs, so always return (message, file)
        if pdf_file is None:
            logging.warning("PDF generation failed or disabled.")
            return "PDF generation failed. Check logs for details.", None
        logging.info("PDF generated successfully at: %s", pdf_file)
        return f"PDF generated: {pdf_file}", pdf_file
    except Exception as e:
        logging.error(f"Failed to generate PDF: {str(e)}. Input summary: {str(summary)[:100]}...")
        return f"Error generating PDF: {str(e)}", None

# Update filters
def update_filters(file_obj, current_file_state):
    if not file_obj or file_obj.name == current_file_state:
        return gr.update(), gr.update(), current_file_state
    try:
        with open(file_obj.name, 'rb') as f:
            csv_content = f.read().decode('utf-8')
        df = pd.read_csv(io.StringIO(csv_content))
        df['timestamp'] = pd.to_datetime(df['timestamp'], errors='coerce')
        lab_site_options = (['All'] + [site for site in df['lab_site'].dropna().astype(str).unique().tolist() if site.strip()]
                            if 'lab_site' in df.columns else ['All'])
        equipment_type_options = (['All'] + [equip for equip in df['equipment_type'].dropna().astype(str).unique().tolist() if equip.strip()]
                                  if 'equipment_type' in df.columns else ['All'])
        return gr.update(choices=lab_site_options, value='All'), gr.update(choices=equipment_type_options, value='All'), file_obj.name
    except Exception as e:
        logging.error(f"Failed to update filters: {str(e)}")
        return gr.update(choices=['All'], value='All'), gr.update(choices=['All'], value='All'), current_file_state

# Gradio Interface
try:
    logging.info("Initializing Gradio interface...")
    with gr.Blocks(css="""
        .dashboard-container {border: 1px solid #e0e0e0; padding: 10px; border-radius: 5px;}
        .dashboard-title {font-size: 24px; font-weight: bold; margin-bottom: 5px;}
        .dashboard-section {margin-bottom: 20px;}
        .dashboard-section h3 {font-size: 18px; margin-bottom: 2px;}
        .dashboard-section p {margin: 1px 0; line-height: 1.2;}
        .dashboard-section ul {margin: 2px 0; padding-left: 20px;}
        .table {width: 100%; border-collapse: collapse;}
        .table th, .table td {border: 1px solid #ddd; padding: 8px; text-align: left;}
        .table th {background-color: #f2f2f2;}
        .table tr:nth-child(even) {background-color: #f9f9f9;}
    """) as iface:
        gr.Markdown('<div class="dashboard-title">LabOps Log Analyzer Dashboard</div>')
        if missing_libs:
            gr.Markdown(f"**Warning:** Missing required libraries: {', '.join(missing_libs)}. "
                        f"Install them via `pip install {' '.join(required_libs[lib][1] for lib in missing_libs)}` for full functionality.")
        gr.Markdown("Upload a CSV file to analyze. Click 'Analyze' to refresh the dashboard. Use 'Export PDF' for report download. "
                    "Date Range accepts a [start, end] pair of day offsets from today (e.g., [-45, -28]) or a single integer (e.g., -30 for exactly 30 days ago).")
        last_modified_state = gr.State(value=None)
        current_file_state = gr.State(value=None)
        cached_df_state = gr.State(value=None)
        with gr.Row():
            with gr.Column(scale=1):
                file_input = gr.File(label="Upload Logs (CSV)", file_types=[".csv"])
                with gr.Group():
                    gr.Markdown("### Filters")
                    lab_site_filter = gr.Dropdown(label="Lab Site", choices=['All'], value='All', interactive=True)
                    equipment_type_filter = gr.Dropdown(label="Equipment Type", choices=['All'], value='All', interactive=True)
                    # gr.Slider holds a single number; list-valued ranges are only
                    # reachable when process_logs is called programmatically
                    date_range_filter = gr.Slider(label="Date Range (Days from Today)", minimum=-365, maximum=0, step=1, value=-30, interactive=True)
                submit_button = gr.Button("Analyze", variant="primary")
                pdf_button = gr.Button("Export PDF", variant="secondary")
            with gr.Column(scale=2):
                with gr.Group(elem_classes="dashboard-container"):
                    gr.Markdown("## Analysis Results")
                    with gr.Group(elem_classes="dashboard-section"):
                        gr.Markdown("### Step 1: Summary Report")
                        summary_output = gr.Markdown()
                    with gr.Group(elem_classes="dashboard-section"):
                        gr.Markdown("### Step 2: Log Preview")
                        preview_output = gr.HTML()
                    with gr.Group(elem_classes="dashboard-section"):
                        gr.Markdown("### Device Cards")
                        device_cards_output = gr.HTML()
                    with gr.Group(elem_classes="dashboard-section"):
                        gr.Markdown("### Charts")
                        with gr.Tab("Usage Hours per Device"):
                            usage_chart_output = gr.Plot()
                            if not plotly_available:
                                gr.Markdown("**Note:** Charts are unavailable because the 'plotly' library is not installed.")
                        with gr.Tab("Downtime per Device"):
                            downtime_chart_output = gr.Plot()
                            if not plotly_available:
                                gr.Markdown("**Note:** Charts are unavailable because the 'plotly' library is not installed.")
                        with gr.Tab("Daily Log Trends"):
                            daily_log_trends_output = gr.Plot()
                            if not plotly_available:
                                gr.Markdown("**Note:** Charts are unavailable because the 'plotly' library is not installed.")
                        with gr.Tab("Weekly Uptime Percentage"):
                            weekly_uptime_output = gr.Plot()
                            if not plotly_available:
                                gr.Markdown("**Note:** Charts are unavailable because the 'plotly' library is not installed.")
                        with gr.Tab("Anomaly Alerts"):
                            anomaly_alerts_output = gr.Plot()
                            if not plotly_available:
                                gr.Markdown("**Note:** Charts are unavailable because the 'plotly' library is not installed.")
                    with gr.Group(elem_classes="dashboard-section"):
                        gr.Markdown("### Step 4: Anomaly Detection")
                        anomaly_output = gr.Markdown()
                    with gr.Group(elem_classes="dashboard-section"):
                        gr.Markdown("### Step 5: AMC Reminders")
                        amc_output = gr.Markdown()
                    with gr.Group(elem_classes="dashboard-section"):
                        gr.Markdown("### Step 6: Insights")
                        insights_output = gr.Markdown()
                    with gr.Group(elem_classes="dashboard-section"):
                        gr.Markdown("### Export Report")
                        pdf_output = gr.Markdown()
                        if not reportlab_available:
                            gr.Markdown("**Note:** PDF export is unavailable because the 'reportlab' library is not installed.")
                        pdf_file_output = gr.File(label="Download Status Report as PDF")
        file_input.change(
            fn=update_filters,
            inputs=[file_input, current_file_state],
            outputs=[lab_site_filter, equipment_type_filter, current_file_state],
            queue=False
        )
        submit_button.click(
            fn=process_logs,
            inputs=[file_input, lab_site_filter, equipment_type_filter, date_range_filter, cached_df_state, last_modified_state],
            outputs=[summary_output, preview_output, usage_chart_output, device_cards_output, daily_log_trends_output,
                     weekly_uptime_output, anomaly_alerts_output, downtime_chart_output, anomaly_output, amc_output,
                     insights_output, pdf_output, cached_df_state, last_modified_state]
        )
        pdf_button.click(
            fn=generate_pdf,
            inputs=[summary_output, preview_output, usage_chart_output, device_cards_output, daily_log_trends_output,
                    weekly_uptime_output, anomaly_alerts_output, downtime_chart_output, anomaly_output, amc_output, insights_output],
            outputs=[pdf_output, pdf_file_output]
        )
    logging.info("Gradio interface initialized successfully")
except Exception as e:
    logging.error(f"Failed to initialize Gradio interface: {str(e)}")
    raise e

if __name__ == "__main__":
    try:
        logging.info("Launching Gradio interface...")
        iface.launch(server_name="0.0.0.0", server_port=7860, debug=True, share=False)
        logging.info("Gradio interface launched successfully")
    except Exception as e:
        logging.error(f"Failed to launch Gradio interface: {str(e)}")
        print(f"Error launching app: {str(e)}")
        raise e