asammoud committed
Commit e57138c · Parent: 04ff4c3

Added some files

Files changed (5):
  1. .gitattributes +2 -0
  2. app2.py +148 -0
  3. download_models.py +34 -0
  4. test.csv +3 -0
  5. train.csv +3 -0
.gitattributes CHANGED
@@ -33,3 +33,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ test.csv filter=lfs diff=lfs merge=lfs -text
+ train.csv filter=lfs diff=lfs merge=lfs -text
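
With these two patterns in place, Git stores test.csv and train.csv as small LFS pointer files (their pointer contents appear at the end of this diff) while the actual multi-megabyte CSVs live in LFS storage.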
app2.py ADDED
@@ -0,0 +1,148 @@
+ import streamlit as st
+ from PIL import Image
+ import matplotlib.pyplot as plt
+ import networkx as nx
+ import json
+ from transformers import AutoModelForCausalLM, AutoTokenizer
+ import torch
+ torch.cuda.empty_cache()
+ import os
+ import numpy as np
+
+ from pipeline.detector import detect_symbols_and_lines
+ from pipeline.graph_builder import build_graph
+ from pipeline.gnn_model import run_gnn
+ from pipeline.agent import generate_agent_actions
+
+ st.set_page_config(layout="wide")
+ st.title("Agentic Predictive Maintenance (P&ID Graph + GNN)")
+
+ # ===== Initialize Session State =====
+ for key, default in {
+     "G": None,
+     "feature_map": {},
+     "scores": {},
+     "fig": None,
+     "actions": [],
+     "deepseek_responses": [],
+ }.items():
+     if key not in st.session_state:
+         st.session_state[key] = default
+
+ # ===== Redisplay Previous Outputs =====
+ if st.session_state["fig"]:
+     st.subheader("Previous Graph Visualization")
+     st.pyplot(st.session_state["fig"])
+
+ if st.session_state["actions"]:
+     st.subheader("Previous Agent Actions")
+     for action in st.session_state["actions"]:
+         st.write(action)
+
+ if st.session_state["deepseek_responses"]:
+     st.subheader("Previous DeepSeek Responses")
+     for r in st.session_state["deepseek_responses"]:
+         st.markdown(f"**You:** {r['query']}")
+         st.markdown(f"**DeepSeek:** {r['answer']}")
+
+ # ===== Upload and Analyze Image =====
+ uploaded_file = st.file_uploader("Upload a P&ID Image", type=["png", "jpg", "jpeg"])
+ if uploaded_file:
+     image = Image.open(uploaded_file)
+     st.image(image, caption="P&ID Diagram", use_column_width=True)
+
+     if st.button("Run Detection and Analysis"):
+         detections, annotations, class_names = detect_symbols_and_lines(image)
+         graph = build_graph(image, detections, annotations, class_names)
+
+         st.info("Running anomaly detection on the graph...")
+
+         fig, feature_map, red_nodes, central_node, scores, G = run_gnn()
+
+         st.session_state.G = G
+         st.session_state.feature_map = feature_map
+         st.session_state.scores = scores
+         st.session_state.fig = fig
+
+         st.pyplot(fig)
+
+         actions = generate_agent_actions(fig, feature_map, red_nodes, central_node, scores)
+         st.session_state.actions = actions
+
+         for action in actions:
+             st.write(action)
+
+ # ===== DeepSeek Local Model Setup =====
+ @st.cache_resource
+ def load_deepseek_model():
+     model_name = "deepseek-ai/deepseek-coder-1.3b-instruct"  # lightweight option
+     tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
+     model = AutoModelForCausalLM.from_pretrained(
+         model_name,
+         torch_dtype=torch.float16,
+         device_map="cuda",
+         trust_remote_code=True
+     )
+     return model, tokenizer
+
+ # ===== DeepSeek Q&A =====
+ st.subheader("Ask Questions About the Graph (DeepSeek Local)")
+ user_query = st.chat_input("Ask a question about the graph...")
+
+ if user_query:
+     G = st.session_state.get("G")
+     feature_map = st.session_state.get("feature_map", {})
+     scores = st.session_state.get("scores", {})
+
+     if G and feature_map and scores:
+         graph_data = {
+             "nodes": [
+                 {
+                     "id": str(i),
+                     "label": feature_map.get(i, f"Node {i}"),
+                     "score": float(scores.get(i, 0.0))
+                 }
+                 for i in G.nodes()
+             ],
+             "edges": [
+                 {"source": str(u), "target": str(v)}
+                 for u, v in G.edges()
+             ]
+         }
+
+         prompt = (
+             "You are an expert graph analyst. Analyze this P&ID graph and answer the question.\n\n"
+             "### Graph Data:\n"
+             f"{json.dumps(graph_data, indent=2)}\n\n"
+             "### Question:\n"
+             f"{user_query}\n\n"
+             "### Answer:\n"
+         )
+
+         try:
+             with st.spinner("Thinking (via DeepSeek Local)..."):
+                 model, tokenizer = load_deepseek_model()
+
+                 inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
+                 outputs = model.generate(
+                     **inputs,
+                     max_new_tokens=128,
+                     temperature=0.7,
+                     do_sample=True
+                 )
+
+                 answer = tokenizer.decode(outputs[0], skip_special_tokens=True)
+                 answer = answer[len(prompt):].strip()
+
+                 st.session_state.deepseek_responses.append({
+                     "query": user_query,
+                     "answer": answer
+                 })
+
+             st.markdown(f"**DeepSeek:** {answer}")
+
+         except Exception as e:
+             st.error(f"DeepSeek error: {e}")
+             st.error("Ensure enough GPU memory (8GB+ recommended).")
+     else:
+         st.warning("Please analyze a diagram first to generate a graph.")
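
The app is launched with streamlit run app2.py. One caveat: load_deepseek_model hard-codes device_map="cuda", so the Q&A section will fail on CPU-only machines, which is what the GPU-memory hint in the except branch alludes to. A minimal device-agnostic variant of the loader, assuming the same model id (a sketch, not part of this commit):

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

def load_deepseek_model_any_device():
    # Sketch: fall back to CPU (and full precision) when no CUDA GPU is present.
    model_name = "deepseek-ai/deepseek-coder-1.3b-instruct"
    use_cuda = torch.cuda.is_available()
    tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
    model = AutoModelForCausalLM.from_pretrained(
        model_name,
        torch_dtype=torch.float16 if use_cuda else torch.float32,
        device_map="cuda" if use_cuda else "cpu",
        trust_remote_code=True,
    )
    return model, tokenizer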
download_models.py ADDED
@@ -0,0 +1,34 @@
+ # download_models.py
+ import os
+ import gdown
+ import zipfile
+
+ def download_all():
+     # Make necessary directories
+     os.makedirs("output", exist_ok=True)
+     os.makedirs("P&ID-Symbols-3/train", exist_ok=True)
+
+     # File ID mapping
+     file_map = {
+         "output/checkpoint0009.pth": "1WfsV8ZuDwlgvBsompA8jdpG_XqNGsWGT",
+         "output/checkpoint_best_total.pth": "1UpVLNeKDrocU4UgBe361IwvHnpFkKues",
+         "rf-detr-base.pth": "1L7mU1jyQNLxJcex3jTd5wccdtUrhgQ2c",
+         "P&ID-Symbols-3/train/_annotations.coco.json": "159ArLMxS1PZ4zD6CAf7bbJfEvLToFA-C",
+         "P&ID-Symbols-3.zip": "1HfjxUN7j92XCciKWB4_ZkC_W6UUKWtDd"
+     }
+
+     # Download each file
+     for path, file_id in file_map.items():
+         if not os.path.exists(path):
+             print(f"\nDownloading {path} ...")
+             gdown.download(id=file_id, output=path, quiet=False)
+
+     # Unzip image folder if not already extracted
+     zip_path = "P&ID-Symbols-3.zip"
+     if os.path.exists(zip_path):
+         print(f"\nExtracting {zip_path} ...")
+         with zipfile.ZipFile(zip_path, 'r') as zip_ref:
+             zip_ref.extractall(".")
+         print("Extraction complete.")
+
+     print("All files downloaded and set up.")
test.csv ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0c25a116e1561d9a3a810013ee35de6cfacf0468fb1372c1df25636b01588ff7
+ size 15724537
train.csv ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cbca99010e6d52117a7470bafc6eddf764028c7e7c2dea8b95810278734930f0
+ size 16726262
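
Both CSVs are committed as Git LFS pointers: a version line, the SHA-256 of the real content, and its size in bytes (roughly 15.7 MB for test.csv and 16.7 MB for train.csv). A minimal sketch of reading those fields, which only applies while the file on disk is still a pointer rather than the smudged CSV content:

def read_lfs_pointer(path):
    # Each pointer line is "key value"; split on the first space.
    with open(path) as f:
        return dict(line.strip().split(" ", 1) for line in f if line.strip())

pointer = read_lfs_pointer("test.csv")
print(pointer["oid"], pointer["size"])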