import pandas as pd
import faiss
from sentence_transformers import SentenceTransformer
from transformers import T5ForConditionalGeneration, T5Tokenizer
import numpy as np
import gradio as gr

# --- 1. Load Models and Data (runs only once when the app starts) ---
print("Loading models and data... This may take a moment.")

# Load the dataset
df = pd.read_csv('syn5000.csv')
df.rename(columns={
    'System / Subsystem Components': 'system',
    'What is the item that you are focusing on?': 'item',
    'What function does the item have?': 'function',
    'What are you trying to achieve (Product Requirement)?': 'requirement',
    'How could you get the requirements wrong (Failure Mode)?': 'failure_mode',
    'Action Taken (Risk Mitigation)': 'mitigation'
}, inplace=True)

df['input_text'] = (
    "System: " + df['system'] + "; " +
    "Item: " + df['item'] + "; " +
    "Requirement: " + df['requirement'] + "; " +
    "Failure: " + df['failure_mode']
)

# Load the embedding model
embedding_model = SentenceTransformer('all-MiniLM-L6-v2')

# Create and index embeddings using FAISS
corpus_embeddings = embedding_model.encode(df['input_text'].tolist())
embedding_dimension = corpus_embeddings.shape[1]
index = faiss.IndexFlatL2(embedding_dimension)
index.add(corpus_embeddings)

# Load the generator model and tokenizer
tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-base")
generator_model = T5ForConditionalGeneration.from_pretrained("google/flan-t5-base")

print("Models and data loaded successfully!")

# --- 2. The Core AI Logic ---
def retrieve_similar_examples(query_text, top_k=3):
    query_embedding = embedding_model.encode([query_text])
    distances, indices = index.search(query_embedding, top_k)
    return df.iloc[indices[0]].to_dict('records')

def generate_mitigation_text(prompt):
    inputs = tokenizer(prompt, return_tensors="pt", max_length=1024, truncation=True)
    outputs = generator_model.generate(**inputs, max_length=128, num_beams=4, early_stopping=True)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)

# This is the main function that Gradio will call
def suggest_mitigation_from_ui(system, item, requirement, failure_mode):
    """
    Takes individual text inputs from the UI and returns a suggested mitigation.
    """
    query_text = (
        f"System: {system}; "
        f"Item: {item}; "
        f"Requirement: {requirement}; "
        f"Failure: {failure_mode}"
    )
    similar_examples = retrieve_similar_examples(query_text)

    prompt = "You are an expert risk analysis engineer.\n\n"
    prompt += "Based on the following similar past examples, write a specific risk mitigation action for the new failure described at the end.\n\n"
    prompt += "--- EXAMPLES ---\n"
    for ex in similar_examples:
        prompt += f"Failure Description: {ex['input_text']}\n"
        prompt += f"Mitigation Action: {ex['mitigation']}\n---\n"
    prompt += "\n--- NEW FAILURE ---\n"
    prompt += f"Failure Description: {query_text}\n"
    prompt += "Mitigation Action:"

    generated_text = generate_mitigation_text(prompt)

    # We can also return the examples it used, for transparency
    retrieved_info = "--- Retrieved Similar Examples ---\n"
    for i, ex in enumerate(similar_examples):
        retrieved_info += f"{i+1}. {ex['input_text'][:150]}...\n"

    return generated_text, retrieved_info
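# A minimal, optional sanity check of the retrieval-plus-generation pipeline
# without starting the UI. The field values below are purely illustrative
# (hypothetical); uncomment to try the core function directly from the
# command line, assuming the definitions above are loaded.
#
# suggestion, context = suggest_mitigation_from_ui(
#     system="Battery pack",
#     item="Cell connector",
#     requirement="Maintain electrical continuity under vibration",
#     failure_mode="Fatigue crack in the connector causes an open circuit",
# )
# print(suggestion)
# print(context)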
# --- 3. Create the Gradio Web Interface ---
with gr.Blocks() as demo:
    gr.Markdown("# AI Risk Mitigation Assistant")
    gr.Markdown("Enter the details of a potential failure to get an AI-generated mitigation suggestion based on historical data.")

    with gr.Row():
        with gr.Column():
            system_input = gr.Textbox(label="System / Subsystem")
            item_input = gr.Textbox(label="Item in Focus")
            requirement_input = gr.Textbox(label="Product Requirement")
            failure_mode_input = gr.Textbox(label="Failure Mode")
            submit_btn = gr.Button("Suggest Mitigation", variant="primary")
        with gr.Column():
            output_mitigation = gr.Textbox(label="✅ AI-Generated Mitigation Suggestion", lines=5)
            output_examples = gr.Textbox(label="Retrieved Examples", lines=5)

    submit_btn.click(
        fn=suggest_mitigation_from_ui,
        inputs=[system_input, item_input, requirement_input, failure_mode_input],
        outputs=[output_mitigation, output_examples]
    )

# This launches the app. On Hugging Face, it will be served automatically.
if __name__ == "__main__":
    demo.launch()
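# Deployment sketch (an assumption about the hosting setup, not specified by
# this script): on a Hugging Face Space, this file would typically be served
# as app.py next to syn5000.csv, with a requirements.txt along the lines of:
#
#   gradio
#   pandas
#   numpy
#   faiss-cpu
#   sentence-transformers
#   transformers
#   torch
#   sentencepiece  # required by the slow T5Tokenizer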