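"""Streamlit front end for agentic predictive maintenance on P&ID diagrams.

Flow: upload a P&ID image -> detect symbols and lines -> build a graph ->
score nodes for anomalies with a GNN -> suggest agent actions, with a
locally hosted DeepSeek model answering free-form questions about the graph.
"""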
import streamlit as st
from PIL import Image
import matplotlib.pyplot as plt
import networkx as nx
import json
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch
if torch.cuda.is_available():
    torch.cuda.empty_cache()  # release any GPU memory cached by earlier runs
import os
import numpy as np
from pipeline.detector import detect_symbols_and_lines
from pipeline.graph_builder import build_graph
from pipeline.gnn_model import run_gnn
from pipeline.agent import generate_agent_actions
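# The pipeline.* modules are project-local; the four functions imported above
# are assumed to match the call signatures used below.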
st.set_page_config(layout="wide")
st.title("?? Agentic Predictive Maintenance (P&ID Graph + GNN)")
# ===== Initialize Session State =====
for key, default in {
"G": None,
"feature_map": {},
"scores": {},
"fig": None,
"actions": [],
"deepseek_responses": [],
}.items():
if key not in st.session_state:
st.session_state[key] = default
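# Streamlit re-runs this script top-to-bottom on every interaction, so
# anything that must survive a rerun (graph, scores, figure, chat history)
# lives in st.session_state, seeded above with defaults.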
# ===== Redisplay Previous Outputs =====
if st.session_state["fig"]:
st.subheader("?? Previous Graph Visualization")
st.pyplot(st.session_state["fig"])
if st.session_state["actions"]:
st.subheader("??? Previous Agent Actions")
for action in st.session_state["actions"]:
st.write(action)
if st.session_state["deepseek_responses"]:
st.subheader("?? Previous DeepSeek Responses")
for r in st.session_state["deepseek_responses"]:
st.markdown(f"**You:** {r['query']}")
st.markdown(f"**DeepSeek:** {r['answer']}")
# ===== Upload and Analyze Image =====
uploaded_file = st.file_uploader("Upload a P&ID Image", type=["png", "jpg", "jpeg"])
if uploaded_file:
    image = Image.open(uploaded_file)
    st.image(image, caption="P&ID Diagram", use_column_width=True)  # use_container_width on newer Streamlit

    if st.button("Run Detection and Analysis"):
        detections, annotations, class_names = detect_symbols_and_lines(image)
        graph = build_graph(image, detections, annotations, class_names)

        st.info("Running anomaly detection on the graph...")
        # run_gnn() takes no arguments; it is assumed to pick up the graph
        # built by build_graph() above.
        fig, feature_map, red_nodes, central_node, scores, G = run_gnn()
        st.session_state.G = G
        st.session_state.feature_map = feature_map
        st.session_state.scores = scores
        st.session_state.fig = fig
        st.pyplot(fig)

        actions = generate_agent_actions(fig, feature_map, red_nodes, central_node, scores)
        st.session_state.actions = actions
        for action in actions:
            st.write(action)
# ===== DeepSeek Local Model Setup =====
@st.cache_resource
def load_deepseek_model():
model_name = "deepseek-ai/deepseek-coder-1.3b-instruct" # lightweight option
tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
model_name,
torch_dtype=torch.float16,
device_map="cuda",
trust_remote_code=True
)
return model, tokenizer
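# GPU-free fallback (an assumption, not part of the original pipeline):
# letting accelerate place layers automatically also works on CPU-only
# machines, at a large cost in generation speed:
#
#     model = AutoModelForCausalLM.from_pretrained(
#         model_name,
#         torch_dtype=torch.float16 if torch.cuda.is_available() else torch.float32,
#         device_map="auto",
#         trust_remote_code=True,
#     )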
# ===== DeepSeek Q&A =====
st.subheader("?? Ask Questions About the Graph (DeepSeek Local)")
user_query = st.chat_input("Ask a question about the graph...")
if user_query:
    G = st.session_state.get("G")
    feature_map = st.session_state.get("feature_map", {})
    scores = st.session_state.get("scores", {})

    # `if G` is False for an empty networkx graph; test identity instead.
    if G is not None and feature_map and scores:
        graph_data = {
            "nodes": [
                {
                    "id": str(i),
                    "label": feature_map.get(i, f"Node {i}"),
                    "score": float(scores.get(i, 0.0)),
                }
                for i in G.nodes()
            ],
            "edges": [
                {"source": str(u), "target": str(v)}
                for u, v in G.edges()
            ],
        }

        prompt = (
            "You are an expert graph analyst. Analyze this P&ID graph and answer the question.\n\n"
            "### Graph Data:\n"
            f"{json.dumps(graph_data, indent=2)}\n\n"
            "### Question:\n"
            f"{user_query}\n\n"
            "### Answer:\n"
        )
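        # NOTE (assumption): json.dumps of the full graph can exceed the
        # model's context window on large diagrams. If that happens, send only
        # the most suspicious nodes, e.g. (assuming higher score = more anomalous):
        #
        #     top_nodes = sorted(graph_data["nodes"], key=lambda n: -n["score"])[:50]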
        try:
            with st.spinner("Thinking (via DeepSeek Local)..."):
                model, tokenizer = load_deepseek_model()
                inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
                outputs = model.generate(
                    **inputs,
                    max_new_tokens=128,
                    temperature=0.7,
                    do_sample=True,
                )
                # Strip the prompt by token count rather than character count:
                # decoded text does not round-trip the prompt string exactly.
                prompt_len = inputs["input_ids"].shape[1]
                answer = tokenizer.decode(
                    outputs[0][prompt_len:], skip_special_tokens=True
                ).strip()

            st.session_state.deepseek_responses.append({
                "query": user_query,
                "answer": answer,
            })
            st.markdown(f"**DeepSeek:** {answer}")
        except Exception as e:
            st.error(f"DeepSeek error: {e}")
            st.error("Ensure enough GPU memory (8GB+ recommended).")
    else:
        st.warning("Please analyze a diagram first to generate a graph.")