# demorrha/core/demorrha.py
#coding: utf-8
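"""Demorrha assistant helper.

Thin wrapper around the OpenAI Assistants API (beta): it looks up or creates the
"Demorrha" assistant and its "Demorrha_Style" vector store, attaches style files
for file_search, and streams translated responses back to the Streamlit app.
"""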
import re
from os import getenv
from typing import Any, Dict, IO, List, Optional, Tuple, Union
from var_app import GlobalSystemPrompts
import streamlit as st
from openai import OpenAI
from dotenv import load_dotenv
class DemorrhaAssistant:
    def __init__(self):
        # Load environment variables from the .env file (notably OPENAI_API_KEY).
        load_dotenv()
        self.client = self.initialize_client()
        self.assistant = None
        self.system_prompt = ""
        self.payload_content = ""
        self.vector_store = None
        self.list_vector_store_ids = []
def initialize_client(self):
        # Initialize the OpenAI client with the API key from the environment
api_key = getenv("OPENAI_API_KEY")
return OpenAI(api_key=api_key)
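    # Example .env file read by load_dotenv() in __init__ (the key name comes from
    # the getenv() call above; the value shown is only a placeholder):
    #
    #     OPENAI_API_KEY=sk-...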
def search_assistant(self, assistant_name="Demorrha"):
last_id = None
while True:
            # List assistants page by page (20 per page) until the name matches
assistants_list = self.client.beta.assistants.list(
order="desc",
limit="20",
after=last_id
)
if len(assistants_list.data) < 1:
break
last_id = assistants_list.data[-1].id
for assistant in assistants_list.data:
if assistant.name == assistant_name:
return assistant.id
return None
def search_vector_store(self, vector_store_name="Demorrha_Style"):
last_id=None
while True:
            # List vector stores page by page (20 per page) until the name matches
vector_store_list = self.client.beta.vector_stores.list(
order="desc",
limit="20",
after=last_id
)
if len(vector_store_list.data) < 1:
break
last_id = vector_store_list.data[-1].id
for vector_store in vector_store_list.data:
                if vector_store.name == vector_store_name:
return vector_store.id
return None
def load_vector_store(self, vector_store_name="Demorrha_Style"):
vector_store_id = self.search_vector_store(vector_store_name)
if vector_store_id is None:
            vector_store = self.client.beta.vector_stores.create(name=vector_store_name)
self.vector_store = vector_store
else:
self.vector_store = self.client.beta.vector_stores.retrieve(vector_store_id)
return self
def get_vector_store(self):
return self.vector_store
def upload_file(self,
file_path,
purpose="assistants"):
        # Upload the file to OpenAI and close the handle once the upload is done.
        with open(file_path, "rb") as file_stream:
            return self.client.files.create(
                file=file_stream,
                purpose=purpose
            )
    def list_files_in_vector_store(self, vector_store_id):
        # Collect every file attached to the vector store, paging 20 at a time.
        all_files = []
        last_id = None
        while True:
            page = self.client.beta.vector_stores.files.list(
                vector_store_id=vector_store_id,
                limit=20,
                after=last_id
            )
            if len(page.data) < 1:
                break
            last_id = page.data[-1].id
            all_files.extend(page.data)
        return all_files
def attach_file_to_vectore_store(self,
vector_store_id,
file_id):
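        # Register an already-uploaded file with the vector store so that it gets
        # indexed and becomes available to the assistant's file_search tool.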
return self.client.beta.vector_stores.files.create(
vector_store_id=vector_store_id,
file_id=file_id
)
    def load_assistant(self, assistant_name="Demorrha"):
        # Reuse the existing assistant with this name if there is one, otherwise create it.
        self.set_system_prompt(GlobalSystemPrompts.linguascribe())
        assistant_id = self.search_assistant(assistant_name)
        if assistant_id is None:
            self.assistant = self.client.beta.assistants.create(
                model="gpt-4o-mini",
                name=assistant_name,
                description="Traite les messages des utilisateurs et génère une traduction.",
                instructions=self.system_prompt,
                temperature=0.1,
                tools=[{"type": "file_search"}]
            )
else:
self.assistant = self.client.beta.assistants.retrieve(assistant_id)
return self
    def get_assistant(self):
        return self.assistant
    def get_assistant_id(self):
        return self.assistant.id if self.assistant is not None else None
    def add_file_to_vector_store(self, file_paths):
        # Upload the files into the vector store and wait until indexing completes.
        file_streams = [open(path, "rb") for path in file_paths]
        try:
            file_batch = self.client.beta.vector_stores.file_batches.upload_and_poll(
                vector_store_id=self.vector_store.id, files=file_streams
            )
        finally:
            for stream in file_streams:
                stream.close()
        return file_batch
def set_payload(self,
content_message:str,
operation_prompt: Optional[str] = ""):
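        # Wrap the user message in triple quotes below the operation prompt so the
        # assistant can tell the instruction apart from the text to process.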
self.payload_content = f'{operation_prompt} :\n"""\n{content_message}\n"""'
return self
def set_system_prompt(self,
system_prompt: Optional[str] = ""):
self.system_prompt = system_prompt
return self
def add_vector_store_to_ressource(self, vector_store_id):
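        # Collect vector store ids locally; they are pushed to the assistant by
        # update_vector_store_ids().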
self.list_vector_store_ids.append(vector_store_id)
return self
def get_vector_store_ids(self):
return self.list_vector_store_ids
def empty_vector_store_ids(self):
self.list_vector_store_ids = []
return self
def update_vector_store_ids(self):
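        # Point the assistant's file_search tool at the collected vector stores.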
self.assistant = self.client.beta.assistants.update(
assistant_id=self.assistant.id,
tool_resources={"file_search": {"vector_store_ids": self.list_vector_store_ids}},
)
    def use_assistant(self):
        # Create a one-off thread containing the payload, run the assistant on it
        # and stream the reply back as it is generated.
        full_response = ""
with self.client.beta.threads.create_and_run(
assistant_id=self.assistant.id,
thread={
"messages": [
{"role": "user", "content": self.payload_content}
]
},
stream=True
) as stream:
for event in stream:
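                # "thread.message.delta" events carry incremental text chunks;
                # "thread.message.completed" carries the full final message.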
if event.event == "thread.message.delta":
full_response += event.data.delta.content[0].text.value
yield full_response + "▌"
elif event.event == "thread.message.completed":
yield event.data.content[0].text.value
return
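# Minimal sketch (an assumption, not code from this repo) of how the streaming
# generator above could be consumed from the Streamlit front end suggested by the
# `st` import:
#
#     demorrha = DemorrhaAssistant()
#     demorrha.load_vector_store().load_assistant()
#     demorrha.add_vector_store_to_ressource(demorrha.get_vector_store().id)
#     demorrha.update_vector_store_ids()
#     demorrha.set_payload(user_text, "Traduit le texte en Anglais.")
#     st.write_stream(demorrha.use_assistant())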
if __name__ == "__main__":
demorrha = DemorrhaAssistant()
vector_store = demorrha.load_vector_store().get_vector_store()
demorrha.empty_vector_store_ids()
print(vector_store)
    if vector_store.status == "completed":
        if vector_store.file_counts.total > 0:
            if vector_store.file_counts.completed == vector_store.file_counts.total:
                print("Vector store loading is complete.")
                demorrha.add_vector_store_to_ressource(vector_store.id)
        else:
            # No file yet: upload the style guide, then register the vector store.
            file_paths = ["style.txt"]
            file_batch = demorrha.add_file_to_vector_store(file_paths)
            print("File added to the vector store:", file_batch)
            demorrha.add_vector_store_to_ressource(vector_store.id)
    # Example of using the assistant
    demorrha.load_assistant("Demorrha")
    demorrha.update_vector_store_ids()
    demorrha.set_payload("Tu dois faire preuve de courage pour trouver la force.", "Traduit le texte en Anglais. Et applique les instructions du fichier 'style.txt'")
response_generator = demorrha.use_assistant()
final_response = ""
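    # Each value yielded by use_assistant() is the cumulative text so far, so the
    # last value kept in final_response is the complete reply.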
for response in response_generator:
print(response, end="\r")
final_response = response
    print(f"\nFinal assistant response: {final_response}")