import asyncio
from functools import partial
import json
import gradio as gr
import pandas as pd
from src.production.flow import generate_data
from src.production.metrics.machine import machine_metrics, fetch_issues
from src.production.metrics.tools import tools_metrics
from src.ui.graphs.general_graphs import GeneralMetricsDisplay
from src.ui.graphs.tools_graphs import ToolMetricsDisplay
MAX_ROWS = 1000
TOOLS_COUNT = 2
def hash_dataframe(df):
"""Computes a simple hash to detect changes in the DataFrame."""
return pd.util.hash_pandas_object(df).sum()
async def dataflow(state):
"""
Main function that updates data if necessary.
Avoids processing if the raw data hasn't changed.
"""
# Initialize state
state.setdefault('data', {}).setdefault('tools', {})
state['data']['tools'].setdefault('all', pd.DataFrame())
for i in range(1, TOOLS_COUNT + 1):
state['data']['tools'].setdefault(f'tool_{i}', pd.DataFrame())
state['data'].setdefault('issues', {})
state.setdefault('status', {})
# Check running state
if state.get('running'):
if 'gen_task' not in state or state['gen_task'] is None or state['gen_task'].done():
state['gen_task'] = asyncio.create_task(generate_data(state))
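    # Raw data accumulated so far in state (written by the background generate_data task)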
raw_data = state['data'].get('raw_df', pd.DataFrame())
# Cold start
if raw_data.empty:
return (
        [pd.DataFrame()] * TOOLS_COUNT +  # tools
        [pd.DataFrame()] +  # all
        [pd.DataFrame()] +  # issues
        [{}]  # status (efficiency)
)
    # Keep only the most recent MAX_ROWS samples
if len(raw_data) > MAX_ROWS:
raw_data = raw_data.tail(MAX_ROWS)
# Check if data has changed
current_hash = hash_dataframe(raw_data)
if state.get('last_hash') == current_hash:
return [
pd.DataFrame(state['data']['tools'].get(f'tool_{i}', pd.DataFrame()))
for i in range(1, TOOLS_COUNT+1)
] + [
pd.DataFrame(state['data']['tools'].get('all', pd.DataFrame()))
] + [
pd.DataFrame(state['data']['issues'])
] + [
state['status']
]
state['last_hash'] = current_hash
    # Compute per-tool metrics, keeping only tools that have data
tools_data = await tools_metrics(raw_data)
tools_data = {tool: df for tool, df in tools_data.items() if not df.empty}
for tool, df in tools_data.items():
state['data']['tools'][tool] = df
# Get machine metrics
machine_data = await machine_metrics(raw_data)
state['status'] = machine_data
# Get tools stats
for tool in ['tool_1', 'tool_2', 'all']:
df = state['data']['tools'].get(tool, pd.DataFrame())
if df.empty or 'Timestamp' not in df.columns:
continue
df = df.copy()
df['Timestamp'] = pd.to_datetime(df['Timestamp'], errors='coerce')
df.dropna(subset=['Timestamp'], inplace=True)
if df.empty:
continue
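        # Expose the latest rolling cp/cpk values for each cote ('pos', 'ori') in the status dict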
idx = df['Timestamp'].idxmax()
for cote in ['pos', 'ori']:
for metric_type in ['cp', 'cpk']:
column = f"{cote}_rolling_{metric_type}"
if column in df.columns:
value = df.at[idx, column]
key = f"{tool}_{metric_type}_{cote}"
state['status'][key] = round(value, 4)
# Get issues
issues = await fetch_issues(raw_data)
state['data']['issues'] = issues
    # Return the refreshed outputs
return (
[
pd.DataFrame(state['data']['tools'].get(f'tool_{i}', pd.DataFrame()))
for i in range(1, TOOLS_COUNT + 1)
] + [
pd.DataFrame(state['data']['tools'].get('all', pd.DataFrame()))
] + [
pd.DataFrame(state['data']['issues'])
] + [
state['status']
]
)
def init_components(n=TOOLS_COUNT):
"""
Initializes the graphical objects (ToolMetricsDisplay and GeneralMetricsDisplay)
and returns:
- displays: list of display objects [GeneralMetricsDisplay, ToolMetricsDisplay1, ToolMetricsDisplay2, ...]
- tool_plots: list of tool-related Gradio components
- general_plots: list of general-related Gradio components
"""
print("Initializing components...")
displays = []
tool_plots = []
general_plots = []
for i in range(1, n + 1): # Tool metrics displays
display = ToolMetricsDisplay()
displays.append(display)
tool_plots.extend(display.tool_block(df=pd.DataFrame(), id=i))
main_display = GeneralMetricsDisplay() # General metrics display
displays.append(main_display)
general_plots.extend(
main_display.general_block(
all_tools_df=pd.DataFrame(),
issues_df=pd.DataFrame(),
status={}
)
)
return displays, tool_plots, general_plots
async def on_tick(state, displays):
"""
Tick function called periodically to update plots if data has changed.
Handles:
- Tool-specific plots (tool_1, tool_2, ..., tool_n)
- General plots (all tools, issues, efficiency)
    Returns the refreshed tool plots followed by the general plots, plus the updated state.
"""
async with state.setdefault('lock', asyncio.Lock()):
data = await dataflow(state)
tool_dfs = data[:-3] # all individual tool DataFrames
all_tools_df = data[-3] # 'all' tools DataFrame
issues_df = data[-2] # issues DataFrame
status = data[-1] # status dict
general_display = displays[-1] # General plots
general_plots = general_display.refresh(
all_tools_df=all_tools_df,
issues_df=issues_df,
status=status
)
tool_plots = [] # Tool-specific plots
for df, display in zip(tool_dfs, displays[:-1]):
tool_plots.extend(display.refresh(df=df))
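        # Persist the latest status and issue list to disk as JSON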
with open("data/status.json", "w") as f:
json.dump(state["status"], f, indent=4)
with open("data/downtimes.json", "w") as f:
json.dump(issues_df.to_json(orient='records'), f, indent=4)
return tool_plots + general_plots + [state]
def dashboard_ui(state):
"""
    Creates the Gradio dashboard components and schedules a refresh every second.
The outputs are separated into two groups for tools and general metrics to
preserve layout order and grouping.
"""
displays, tool_plots, general_plots = init_components()
timer = gr.Timer(1.0)
timer.tick(
fn=partial(on_tick, displays=displays),
inputs=[state],
outputs=tool_plots + general_plots + [state]
    )
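

# Minimal wiring sketch (illustrative only): the entry-point guard, the initial
# state contents, and the name "demo" are assumptions, not part of the original
# module; dashboard_ui() and gr.State/gr.Blocks come from this file and Gradio.
if __name__ == "__main__":
    with gr.Blocks() as demo:
        state = gr.State({'running': True})  # dataflow() only starts the generator when 'running' is truthy
        dashboard_ui(state)
    demo.launch()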