1st commit
Contacts Prescripteurs-Ensemble des données.csv
ADDED
The diff for this file is too large to render.
Lieux-Ensemble des données.csv
ADDED
The diff for this file is too large to render.
Thèmes de séjour-Ensemble des données.csv
ADDED
The diff for this file is too large to render.
langchain_csv_gradio.py
ADDED
@@ -0,0 +1,107 @@
import os
import sys
import io
import pandas as pd
from langchain_experimental.agents.agent_toolkits import create_csv_agent
from langchain_anthropic import ChatAnthropic
import gradio as gr

# Anthropic model configuration
llm = ChatAnthropic(
    model="claude-3-7-sonnet-20250219",  # previously: claude-3-5-sonnet-20241022
    temperature=0.1,
    api_key=os.environ["ANTHROPIC_API_KEY"]  # read the key from the environment instead of hardcoding it
)

# Available CSV files
items = [
    "Contacts Prescripteurs-Ensemble des données.csv",
    "Lieux-Ensemble des données.csv",
    "Thèmes de séjour-Ensemble des données.csv"
]

# Function that queries the CSV data through a LangChain CSV agent
def query_data(query, csv_file_path):
    # Redirect stdout to a buffer so the agent's verbose trace can be captured
    old_stdout = sys.stdout
    new_stdout = io.StringIO()
    sys.stdout = new_stdout

    # Create the agent and run the query
    agent = create_csv_agent(
        llm,
        csv_file_path,
        verbose=True,
        return_intermediate_steps=True,
        max_iterations=8,
        allow_dangerous_code=True,
        handle_parsing_errors=True,
        include_df_in_prompt=False,
        # number_of_head_rows=1
    )
    try:
        response = agent.invoke(query)
        # Grab the console output
        console_output = new_stdout.getvalue()

        # Restore stdout
        sys.stdout = old_stdout
        print(response)
        # Return the final answer and the intermediate steps
        return {
            "output": response["output"],
            "console_output": response["intermediate_steps"]
        }
    except Exception as e:
        console_output = new_stdout.getvalue()

        # Restore stdout
        sys.stdout = old_stdout
        # Return the error and the captured console output
        return {
            "output": f"**Error:** {e}",
            "console_output": console_output
        }


# Chat callback
def chat(question, csv_file_path, chat_history):
    csv_file_path += ".csv"
    # Prepend the column names so the model sees the schema (include_df_in_prompt is False)
    question = f"df.columns : \n{pd.read_csv(csv_file_path).columns}\n\n" + question
    print(question)
    # Call query_data
    result = query_data(question, csv_file_path)

    # Append the question, the agent's reasoning and the final answer to the chat history
    chat_history.append((question, f"**Réflexion de l'IA :**\n{result['console_output']}\n\n**Réponse finale**:\n{result['output']}"))

    # Return the updated history
    return chat_history

# Build the Gradio interface
with gr.Blocks() as demo:
    gr.Markdown("""# Your airtable assistant
Les conversations ne sont pas encore disponibles.""")

    with gr.Row():
        chatbot = gr.Chatbot(label="Chat")  # Chatbot component that displays the messages

    with gr.Row():  # Row that lays the components out side by side
        with gr.Column():  # Column for the question
            question = gr.Textbox(label="Posez votre question")
            submit_btn = gr.Button("Envoyer")
        with gr.Column():  # Column for the dropdown
            dropdown = gr.Dropdown(choices=[elt[:-4] for elt in items], label="Sélectionnez une table")

    # Chat history state, stores the messages across turns
    chat_history = gr.State([])

    # Wire the components to the chat callback
    submit_btn.click(
        chat,  # function to call
        inputs=[question, dropdown, chat_history],  # inputs
        outputs=chatbot  # output
    )

# Launch the interface
demo.launch(inbrowser=True)
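For reference, a minimal sketch of the agent call that query_data wraps, with the stdout capture and the Gradio plumbing stripped away. The question text is a placeholder, and the model is assumed to read ANTHROPIC_API_KEY from the environment:

import pandas as pd
from langchain_anthropic import ChatAnthropic
from langchain_experimental.agents.agent_toolkits import create_csv_agent

# ChatAnthropic falls back to the ANTHROPIC_API_KEY environment variable when api_key is not passed
llm = ChatAnthropic(model="claude-3-7-sonnet-20250219", temperature=0.1)

csv_path = "Lieux-Ensemble des données.csv"  # any of the three committed tables
agent = create_csv_agent(
    llm,
    csv_path,
    verbose=True,
    allow_dangerous_code=True,    # the agent executes model-generated pandas code
    handle_parsing_errors=True,
    include_df_in_prompt=False,   # schema is supplied via the prompt prefix instead
)

# Same prompt prefix as chat(): expose the column names rather than the whole dataframe
question = f"df.columns :\n{pd.read_csv(csv_path).columns}\n\nHow many rows does this table contain?"
print(agent.invoke(question)["output"])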
requirements.txt
ADDED
Binary file (4.68 kB).
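The requirements file is committed as a binary blob, so its exact contents and version pins are not visible in this diff. Judging only from the imports in langchain_csv_gradio.py, it plausibly has to cover at least pandas, gradio, langchain-anthropic and langchain-experimental; anything beyond that is not recoverable from this commit.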