# Mission-Control / src/streamlit_app.py
# Hugging Face Space file — last commit "Update src/streamlit_app.py" (491c55a, verified) by workbykait.
import io
import os
import tempfile
import time
from datetime import datetime

import numpy as np
import pandas as pd
import plotly.express as px
import plotly.graph_objects as go
import streamlit as st
from gradio_client import Client, handle_file
from PIL import Image
from sklearn.ensemble import IsolationForest
# Page chrome: must be the first Streamlit call in the script.
st.set_page_config(
    page_title="🛰️ Satellite Log Companion",
    layout="wide",
    initial_sidebar_state="expanded",
    page_icon="🛰️",
)

# Custom CSS for a clean mission-control look (dark background, card-style charts).
st.markdown("""
<style>
.main {background-color: #0e1117;}
.stPlotlyChart {background-color: #1a1f2e; border-radius: 10px; padding: 10px;}
.kpi {font-size: 2rem; font-weight: bold; text-align: center;}
</style>
""", unsafe_allow_html=True)

# ====================== SESSION STATE & CACHING ======================
# df: the telemetry DataFrame shared by every tab (None until uploaded/loaded).
if "df" not in st.session_state:
    st.session_state.df = None
# gradio_client: cached gradio_client.Client connection (None until the user connects).
if "gradio_client" not in st.session_state:
    st.session_state.gradio_client = None
@st.cache_resource
def get_gradio_client(space_url: str, hf_token=None):
    """Connect to the companion Gradio Space and cache the client.

    Args:
        space_url: Full URL (or id) of the Gradio Space to talk to.
        hf_token: Optional HF token; when given, the Space is duplicated
            into the user's account (useful for private/busy Spaces).

    Returns:
        A connected ``gradio_client.Client``, or ``None`` on failure
        (the error is surfaced in the UI rather than raised).
    """
    try:
        if hf_token:
            # Duplicating gives the user a private copy they control.
            client = Client.duplicate(space_url, hf_token=hf_token)
        else:
            client = Client(space_url)
        return client
    except Exception as e:
        # Best-effort connection: show the error in-app instead of crashing the script run.
        st.error(f"Failed to connect to Gradio: {e}")
        return None
@st.cache_data
def load_sample_data():
    """Build synthetic satellite telemetry (500 rows @ 5-min cadence) with injected anomalies.

    Returns a DataFrame with timestamp, temperature, bus voltage, signal
    strength, altitude and error-rate columns. Three short anomaly windows
    are injected so the detection tabs have something to find.
    """
    dates = pd.date_range("2025-02-01", periods=500, freq="5min")
    np.random.seed(42)  # deterministic sample so reruns/caching stay consistent
    data = {
        "timestamp": dates,
        "temperature_C": np.random.normal(25, 5, 500).clip(10, 45),
        "voltage_V": np.random.normal(28, 1, 500).clip(24, 32),
        "signal_dB": np.random.normal(-90, 5, 500).clip(-110, -70),
        "altitude_km": np.random.normal(400, 10, 500).clip(380, 420),
        "error_rate": np.random.poisson(2, 500) / 1000,
    }
    df = pd.DataFrame(data)
    # Inject anomalies (6-row windows each)
    df.loc[100:105, "temperature_C"] += 25  # thermal spike
    df.loc[250:255, "voltage_V"] -= 8       # power drop
    df.loc[400:405, "signal_dB"] += 30      # signal surge
    return df
def parse_uploaded_file(uploaded):
    """Parse an uploaded log file into a DataFrame.

    Args:
        uploaded: A file-like object with a ``.name`` attribute
            (Streamlit's ``UploadedFile``).

    Returns:
        A ``pandas.DataFrame`` for CSV files, otherwise ``None`` after
        showing a user-facing warning.
    """
    # Case-insensitive check so files named e.g. "LOG.CSV" are accepted too.
    if uploaded.name.lower().endswith(".csv"):
        return pd.read_csv(uploaded)
    st.warning("Only CSV supported for now. Your Gradio app can handle other formats.")
    return None
# ====================== SIDEBAR ======================
# Connection settings + sample-data loader; results land in st.session_state.
with st.sidebar:
    st.title("🛰️ Mission Control")
    st.markdown("**Companion to your Gradio Satellite Log Analyzer**")
    gradio_url = st.text_input(
        "Your Gradio Analyzer Space URL",
        value="https://yourusername-satellite-log-analyzer.hf.space",
        help="Paste the full URL of your Gradio Space",
    )
    hf_token = st.text_input("HF Token (optional - for private/duplicated Gradio)", type="password")
    if st.button("🔗 Connect to Gradio"):
        st.session_state.gradio_client = get_gradio_client(gradio_url, hf_token)
        if st.session_state.gradio_client:
            st.success("Connected! Use the Gradio tab to analyze.")
            st.session_state.gradio_client.view_api()  # prints the API signature to console/logs
    st.divider()
    if st.button("📥 Load Sample Satellite Telemetry"):
        st.session_state.df = load_sample_data()
        st.success("Sample data loaded (with injected anomalies)!")
# ====================== TABS ======================
tab1, tab2, tab3, tab4, tab5, tab6, tab7 = st.tabs([
    "🏠 Home", "📤 Upload & Parse", "🔍 Data Explorer",
    "📈 Visualizations", "🕵️ Local Analysis", "🤖 Gradio Integration", "📊 Reports",
])

# --- Tab 1: landing page / feature overview ---
with tab1:
    st.title("🛰️ Satellite Log Companion Dashboard")
    st.markdown("""
This app **pairs perfectly** with your Gradio Satellite Log Analyzer.
• Upload & explore raw telemetry
• Interactive plots & local ML anomalies
• One-click send to your Gradio app for deep AI analysis
• Export professional reports
""")
    st.info("👈 Set your Gradio URL in the sidebar → then use the **Gradio Integration** tab")
# --- Tab 2: upload a CSV log and preview it ---
with tab2:
    st.header("Upload Logs")
    uploaded = st.file_uploader("Upload satellite log (CSV)", type=["csv"], accept_multiple_files=False)
    if uploaded:
        df = parse_uploaded_file(uploaded)
        if df is not None:
            st.session_state.df = df
            st.success(f"Loaded {len(df)} rows • Columns: {list(df.columns)}")
    # Preview whatever is currently loaded (uploaded file or sample data).
    if st.session_state.df is not None:
        st.subheader("Preview")
        st.dataframe(st.session_state.df.head(100), use_container_width=True)
# --- Tab 3: basic shape/stats exploration of the loaded telemetry ---
with tab3:
    if st.session_state.df is None:
        st.warning("Upload data or load sample first")
    else:
        st.header("Data Explorer")
        col1, col2 = st.columns(2)
        with col1:
            st.metric("Rows", len(st.session_state.df))
            st.metric("Columns", len(st.session_state.df.columns))
        with col2:
            if "timestamp" in st.session_state.df.columns:
                # Coerce in place so later tabs can rely on a datetime column;
                # unparseable values become NaT rather than raising.
                st.session_state.df["timestamp"] = pd.to_datetime(st.session_state.df["timestamp"], errors="coerce")
                st.metric("Time Span", f"{st.session_state.df['timestamp'].min().date()} → {st.session_state.df['timestamp'].max().date()}")
        st.dataframe(st.session_state.df.describe(), use_container_width=True)
        # Column filter
        cols = st.multiselect("Select columns to view", st.session_state.df.columns, default=st.session_state.df.columns[:6])
        st.dataframe(st.session_state.df[cols], use_container_width=True)
# --- Tab 4: interactive Plotly charts over the numeric telemetry columns ---
with tab4:
    if st.session_state.df is None:
        st.warning("No data yet")
    else:
        st.header("Interactive Visualizations")
        numeric_cols = st.session_state.df.select_dtypes(include=np.number).columns.tolist()
        y_cols = st.multiselect("Telemetry parameters (Y-axis)", numeric_cols, default=numeric_cols[:3])
        if "timestamp" in st.session_state.df.columns and y_cols:
            fig = px.line(st.session_state.df, x="timestamp", y=y_cols, title="Telemetry Time Series")
            st.plotly_chart(fig, use_container_width=True)
        # Correlation heatmap (needs at least two numeric columns)
        if len(numeric_cols) > 1:
            corr = st.session_state.df[numeric_cols].corr()
            fig_heat = px.imshow(corr, text_auto=True, aspect="auto", title="Parameter Correlation")
            st.plotly_chart(fig_heat, use_container_width=True)
        # If lat/lon present → ground-track map (st.map expects "lat"/"lon" names)
        if {"latitude", "longitude"}.issubset(st.session_state.df.columns):
            st.map(st.session_state.df.rename(columns={"latitude": "lat", "longitude": "lon"}))
# --- Tab 5: local anomaly detection (statistical z-score or Isolation Forest) ---
with tab5:
    if st.session_state.df is None:
        st.warning("No data")
    else:
        st.header("Local Anomaly Detection")
        numeric_cols = st.session_state.df.select_dtypes(include=np.number).columns.tolist()
        method = st.radio("Detection method", ["Z-Score (simple)", "Isolation Forest (ML)"])
        if method == "Z-Score (simple)":
            threshold = st.slider("Z-Score threshold", 2.0, 5.0, 3.0)
            # Flag each numeric column independently; results are stored as
            # boolean "<col>_anomaly" columns on the session DataFrame.
            for col in numeric_cols:
                std = st.session_state.df[col].std()
                if not std:
                    # Zero-variance column: dividing would yield NaN/inf — nothing to flag.
                    st.session_state.df[f"{col}_anomaly"] = False
                    continue
                z = np.abs((st.session_state.df[col] - st.session_state.df[col].mean()) / std)
                st.session_state.df[f"{col}_anomaly"] = z > threshold
            # Report rows flagged by ANY parameter (a single total, not per-column).
            flagged = st.session_state.df.filter(like="_anomaly").any(axis=1)
            st.success(f"Found anomalies in {int(flagged.sum())} rows")
            st.dataframe(st.session_state.df[flagged])
        else:  # Isolation Forest
            if st.button("Run Isolation Forest"):
                X = st.session_state.df[numeric_cols].fillna(0)
                # fit_predict returns -1 for outliers, 1 for inliers.
                iso = IsolationForest(contamination=0.05, random_state=42)
                preds = iso.fit_predict(X)
                st.session_state.df["isolation_anomaly"] = preds == -1
                st.success(f"Isolation Forest flagged {sum(preds == -1)} anomalies")
                fig = px.scatter(
                    st.session_state.df,
                    x="timestamp" if "timestamp" in st.session_state.df else numeric_cols[0],
                    y=numeric_cols[0],
                    color="isolation_anomaly",
                    title="Anomalies Highlighted",
                )
                st.plotly_chart(fig, use_container_width=True)
# --- Tab 6: ship the current DataFrame to the remote Gradio analyzer ---
with tab6:
    st.header("🤖 Send to Your Gradio Analyzer")
    if st.session_state.gradio_client is None:
        st.warning("Connect your Gradio Space in the sidebar first")
    elif st.session_state.df is None:
        st.warning("Load/upload data first")
    else:
        st.info("The app will save your current dataframe as CSV and send it to your Gradio Space.")
        col1, col2 = st.columns([3, 1])
        with col1:
            if st.button("🚀 Send Current Log to Gradio Analyzer", type="primary", use_container_width=True):
                # Serialize the DataFrame to a temp CSV; delete=False so the
                # path stays valid after the context closes (cleaned in finally).
                with tempfile.NamedTemporaryFile(suffix=".csv", delete=False) as tmp:
                    st.session_state.df.to_csv(tmp.name, index=False)
                    tmp_path = tmp.name
                client = st.session_state.gradio_client
                with st.spinner("Sending to Gradio... (may queue if busy)"):
                    try:
                        # Use submit for long-running satellite log jobs
                        job = client.submit(
                            log_file=handle_file(tmp_path),  # your Gradio likely has a file input named "log_file" or similar
                            api_name="/predict",  # change if your function name is different (check view_api)
                        )
                        status_placeholder = st.empty()
                        while not job.done():
                            status = job.status()
                            status_placeholder.info(f"Status: {status.code} | Queue: {getattr(status, 'rank', 'N/A')}")
                            time.sleep(0.5)  # throttle polling — avoids a busy-wait hammering the queue endpoint
                        result = job.result()
                        st.success("Gradio analysis complete!")
                        st.subheader("Gradio Results")
                        # Flexible rendering of any output type(s)
                        if isinstance(result, (list, tuple)):
                            for i, out in enumerate(result):
                                st.markdown(f"**Output {i+1}**")
                                if isinstance(out, str):
                                    st.markdown(out)
                                elif isinstance(out, pd.DataFrame):
                                    st.dataframe(out)
                                elif isinstance(out, (bytes, io.BytesIO)):
                                    st.image(out)
                                elif isinstance(out, Image.Image):
                                    st.image(out)
                                else:
                                    st.write(out)
                        else:
                            st.write(result)
                    except Exception as e:
                        st.error(f"Gradio call failed: {e}")
                    finally:
                        # Always remove the temp CSV, success or failure.
                        os.unlink(tmp_path)
        st.caption("Tip: Open your Gradio Space in another tab to compare side-by-side")
# --- Tab 7: export the current data as CSV or a Markdown report ---
with tab7:
    if st.session_state.df is None:
        st.warning("No data")
    else:
        st.header("Generate Reports")
        st.download_button(
            "📥 Download full CSV",
            st.session_state.df.to_csv(index=False),
            file_name=f"satellite_log_{datetime.now().strftime('%Y%m%d_%H%M')}.csv",
            mime="text/csv",
        )
        # Simple Markdown report built from the current DataFrame.
        # NOTE(review): describe().to_markdown() needs the optional 'tabulate'
        # package — confirm it is in the Space's requirements.
        report_md = f"""
# Satellite Telemetry Report
**Generated:** {datetime.now()}
## Summary
- Rows: {len(st.session_state.df)}
- Time range: {st.session_state.df['timestamp'].min() if 'timestamp' in st.session_state.df.columns else 'N/A'}
## Key Stats
{st.session_state.df.describe().to_markdown()}
## Anomalies (if detected)
Check the Local Analysis tab.
"""
        st.download_button("📄 Download Markdown Report", report_md, file_name="report.md")
        st.success("All done! Your Gradio + Streamlit combo is now a full satellite operations suite.")

# Footer
st.caption("Built as a companion to your Hugging Face Gradio Satellite Log Analyzer • Powered by Streamlit + gradio_client")