asammoud committed
Commit e5fa36b · 1 Parent(s): 0b08a53

Upload 3 files

Files changed (3):
  1. README.md +2 -19
  2. app.py +171 -0
  3. requirements.txt +16 -3
README.md CHANGED
@@ -1,19 +1,2 @@
- ---
- title: Agentic Predictive Maintenance
- emoji: 🚀
- colorFrom: red
- colorTo: red
- sdk: docker
- app_port: 8501
- tags:
- - streamlit
- pinned: false
- short_description: Streamlit template space
- ---
-
- # Welcome to Streamlit!
-
- Edit `/src/streamlit_app.py` to customize this app to your heart's desire. :heart:
-
- If you have any questions, checkout our [documentation](https://docs.streamlit.io) and [community
- forums](https://discuss.streamlit.io).
+ # Agentic_predective_maintenance
+ # Agentic_predective_maintenance
app.py ADDED
@@ -0,0 +1,171 @@
+ import streamlit as st
+ from PIL import Image
+ import matplotlib.pyplot as plt
+ import networkx as nx
+ import json
+ from transformers import AutoModelForCausalLM, AutoTokenizer
+ import torch
+ torch.cuda.empty_cache()
+ #import openai
+ import os
+ import numpy as np
+
+ # Ensure models and datasets are available
+ from download_models import download_all
+
+ # Run only if the critical annotation file is missing
+ if not os.path.exists("P&ID-Symbols-3/train/_annotations.coco.json"):
+     with st.spinner("Downloading required files (models & datasets)..."):
+         download_all()
+
+ from pipeline.detector import detect_symbols_and_lines
+ from pipeline.graph_builder import build_graph
+ from pipeline.gnn_model import run_gnn
+ from pipeline.agent import generate_agent_actions
+
+ st.set_page_config(layout="wide")
+ st.title(" Agentic Predictive Maintenance (P&ID Graph + GNN)")
+
+
+ # Initialize session state variables
+ if "G" not in st.session_state:
+     st.session_state.G = None
+ if "feature_map" not in st.session_state:
+     st.session_state.feature_map = {}
+ if "scores" not in st.session_state:
+     st.session_state.scores = {}
+
+ #uploaded_file = st.file_uploader("Upload a P&ID Image", type=["png", "jpg", "jpeg"])
+ #if uploaded_file:
+ # === User can choose from downloaded dataset OR upload their own ===
+ st.subheader("Upload or Select a P&ID Drawing")
+
+ local_dataset_dir = "P&ID-Symbols-3/P&ID-Symbols-3/test"
+ image_files = []
+ if os.path.exists(local_dataset_dir):
+     image_files = [f for f in os.listdir(local_dataset_dir) if f.lower().endswith((".png", ".jpg", ".jpeg"))]
+ else:
+     st.warning(f"Dataset folder not found: {local_dataset_dir}. Please run download_models.py to download it.")
+
+ selected_image = st.selectbox("Select a sample from P&ID-Symbols-3:", ["-- Select an example --"] + image_files)
+ uploaded_file = st.file_uploader("...Or upload your own P&ID image", type=["png", "jpg", "jpeg"])
+
+ image = None
+ image_source = ""
+
+ if selected_image and selected_image != "-- Select an example --":
+     image_path = os.path.join(local_dataset_dir, selected_image)
+     image = Image.open(image_path)
+     image_source = f"Sample from dataset: {selected_image}"
+ elif uploaded_file:
+     image = Image.open(uploaded_file)
+     image_source = f"Uploaded: {uploaded_file.name}"
+
+ if image:
+     st.image(image, caption=image_source, use_column_width=True)
+
+     #image = Image.open(uploaded_file)
+     #st.image(image, caption="P&ID Diagram", use_column_width=True)
+
+     if st.button(" Run Detection and Analysis"):
+         # Detect symbols/lines, then build the connectivity graph
+         detections, annotations, class_names = detect_symbols_and_lines(image)
+         graph = build_graph(image, detections, annotations, class_names)
+
+         st.info("Running anomaly detection on the graph (simulated for now)...")
+
+         fig, feature_map, red_nodes, central_node, scores, G = run_gnn()
+
+         st.session_state.G = G
+         st.session_state.feature_map = feature_map
+         st.session_state.scores = scores
+
+         st.pyplot(fig)
+
+         # Turn the GNN output into recommended maintenance actions
+         actions = generate_agent_actions(fig, feature_map, red_nodes, central_node, scores)
+         for action in actions:
+             st.write(action)
+
+
+ # === DeepSeek Local Model Setup ===
+ @st.cache_resource
+ def load_deepseek_model():
+     model_name = "deepseek-ai/deepseek-coder-1.3b-instruct"  # Lightweight version
+     # model_name = "deepseek-ai/deepseek-llm-7b"  # Larger but more capable
+
+     tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
+     # GPU variant, kept here for reference:
+     # model = AutoModelForCausalLM.from_pretrained(
+     #     model_name,
+     #     torch_dtype=torch.float16,
+     #     device_map="auto",
+     #     trust_remote_code=True,
+     # )
+     model = AutoModelForCausalLM.from_pretrained(
+         model_name,
+         torch_dtype=torch.float16,
+         device_map="cpu",
+         # load_in_4bit=True,  # 4-bit quantization
+         trust_remote_code=True
+     )
+     return model, tokenizer
+
+ # === Q&A Interface ===
+ st.subheader(" Ask Questions About the Graph (DeepSeek Local)")
+ user_query = st.chat_input("Ask a question about the graph...")
+
+ if user_query:
+     G = st.session_state.get("G")
+     feature_map = st.session_state.get("feature_map", {})
+     scores = st.session_state.get("scores", [])
+
+     if G is not None and feature_map and len(scores) > 0:
+         # Serialize the graph (node labels + anomaly scores) for the LLM prompt
+         graph_data = {
+             "nodes": [
+                 {
+                     "id": str(i),
+                     "label": feature_map[i] if i < len(feature_map) else f"Node {i}",
+                     "score": float(scores[i]) if i < len(scores) else 0.0
+                 }
+                 for i in G.nodes()
+             ],
+             "edges": [
+                 {"source": str(u), "target": str(v)}
+                 for u, v in G.edges()
+             ]
+         }
+
+         prompt = (
+             "You are an expert graph analyst. Analyze this P&ID graph and answer the question.\n\n"
+             "### Graph Data:\n"
+             f"{json.dumps(graph_data, indent=2)}\n\n"
+             "### Question:\n"
+             f"{user_query}\n\n"
+             "### Answer:\n"
+         )
+
+         try:
+             with st.spinner("Thinking (via DeepSeek Local)..."):
+                 # Load model (cached after first run)
+                 model, tokenizer = load_deepseek_model()
+
+                 # Generate response
+                 inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
+                 outputs = model.generate(
+                     **inputs,
+                     max_new_tokens=128,
+                     temperature=0.7,
+                     do_sample=True
+                 )
+
+                 answer = tokenizer.decode(outputs[0], skip_special_tokens=True)
+                 # Remove the prompt from the answer
+                 answer = answer[len(prompt):].strip()
+
+                 st.markdown(f"**DeepSeek:** {answer}")
+
+         except Exception as e:
+             st.error(f"DeepSeek error: {e}")
+             st.error("Make sure you have enough GPU memory (8GB+ recommended for the 7B model)")
+     else:
+         st.warning("Graph or scores are not ready yet.")
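
For reference, the following standalone sketch (not part of the commit) reproduces the graph-to-prompt step from app.py's Q&A section on a toy networkx graph; the node labels and anomaly scores are invented stand-ins for the real run_gnn() output.

# Minimal sketch of the graph-to-prompt serialization used in app.py,
# with a toy graph in place of the GNN results (labels/scores are invented).
import json
import networkx as nx

G = nx.Graph()
G.add_edges_from([(0, 1), (1, 2)])
feature_map = ["pump", "valve", "tank"]      # stand-in node labels
scores = [0.12, 0.87, 0.05]                  # stand-in anomaly scores

graph_data = {
    "nodes": [
        {"id": str(i), "label": feature_map[i], "score": float(scores[i])}
        for i in G.nodes()
    ],
    "edges": [{"source": str(u), "target": str(v)} for u, v in G.edges()],
}

user_query = "Which node looks most anomalous?"
prompt = (
    "You are an expert graph analyst. Analyze this P&ID graph and answer the question.\n\n"
    "### Graph Data:\n"
    f"{json.dumps(graph_data, indent=2)}\n\n"
    "### Question:\n"
    f"{user_query}\n\n"
    "### Answer:\n"
)
print(prompt)  # this text is what the local DeepSeek model receives
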
requirements.txt CHANGED
@@ -1,3 +1,16 @@
- altair
- pandas
- streamlit
+ streamlit
+ networkx
+ matplotlib
+ Pillow
+ supervision
+ opencv-python-headless
+ pydantic
+ peft
+ pycocotools
+ numpy
+ gdown
+ scikit-learn
+ torch-geometric
+ torch>=2.0.0
+ torchvision>=0.15.0
+ transformers>=4.28.0
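
As a quick sanity check of this dependency list, a small smoke-test script (hypothetical, not part of the commit) can confirm that the heavier packages import cleanly before the Space starts; the module names below are the usual import names for these packages.

# Hypothetical smoke test: verify the key packages from requirements.txt import.
import importlib

modules = [
    "streamlit", "networkx", "matplotlib", "PIL", "supervision", "cv2",
    "pydantic", "peft", "pycocotools", "numpy", "gdown",
    "sklearn", "torch_geometric", "torch", "torchvision", "transformers",
]

for name in modules:
    try:
        mod = importlib.import_module(name)
        print(f"OK      {name} ({getattr(mod, '__version__', 'no __version__')})")
    except ImportError as exc:
        print(f"MISSING {name}: {exc}")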