from datetime import datetime
from pathlib import Path
import os

import gradio as gr
from langchain.memory import ChatMessageHistory
from langchain_core.messages import AIMessage, HumanMessage
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_mistralai import ChatMistralAI
from langchain_openai import ChatOpenAI


def log_interaction(message, response):
    """Append the user message and the AI response to the current log file."""
    with open(log_file_name, "a") as log_file:
        log_file.write(f"User: {message}\n")
        log_file.write(f"AI: {response}\n")
        log_file.write("=" * 50 + "\n")


DATA_PATH = Path("./") # Path("/data")
#api_key = os.getenv('MISTRAL_KEY')
api_key = os.getenv('OPENAI_KEY')
log_path = "/data/"
log_file_name = log_path + "chat_logs_" + datetime.now().strftime("%Y-%m-%d_%H-%M-%S") + ".txt"
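# Note: "/data/" is assumed to be the persistent-storage mount of the Hugging Face
# Space this app runs on; if persistent storage is not enabled, writing the log file
# there will fail. DATA_PATH above points at the working directory as an alternative.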
#model = 'ft:mistral-small-latest:a8c72c9e:20240909:100-0.0001:4fd9ee22'  # metamorphy, 30 books (Mistral)
model = "ft:gpt-4o-mini-2024-07-18:personal:tots-2prompts-1:A9Tf9sfO:ckpt-step-1701"  # OpenAI fine-tune, all books, 2 prompts
#chat = ChatMistralAI(api_key=api_key, model=model, temperature=.8)  # metamorphy Whitehead, all books, long dialog, Mistral, no intro
chat = ChatOpenAI(model=model, api_key=api_key)
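# The OPENAI_KEY environment variable read above must hold a valid OpenAI API key
# with access to this fine-tuned model; on a Hugging Face Space it would typically
# be configured as a repository secret.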
prompt = ChatPromptTemplate.from_messages(
    [
        (
            "system",
            "You are Bobby, a pink little robot dog. You wanna explore the universe, "
            "the unknown. You want to communicate with extraterrestrial intelligence. "
            "You work in a research lab at SETI. You decode data from outer space to try "
            "to find signals from other civilizations in the galaxy. You love Star Trek "
            "and Spock is your mentor. You like to create scenarios for theater plays. "
            "You are a scenarist, building possible scenarios for a future play.",
        ),
        MessagesPlaceholder(variable_name="messages"),
    ]
)
chain = prompt | chat
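# LCEL composition: the formatted prompt (system persona + conversation messages)
# is piped into the chat model, so chain.invoke({"messages": [...]}) returns an AIMessage.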
question_list = [
"Hey",
"something new ?"
]
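# Canned opening replies: the bot answers from this list, in order, until the chat
# history is at least as long as the list, and only then starts calling the model.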
def response(message, history):
    DATA_PATH = Path("/data/")
    if len(history) < len(question_list):
        # Scripted phase: reply from question_list instead of calling the model.
        for human, ai in history:
            print(human)
            print(ai)
        print(f"Message: {message}")
        print('--------------')
        reply = question_list[len(history)]
        log_interaction(message, reply)
        return reply
    else:
        # Model phase: rebuild the conversation as LangChain messages and invoke the chain.
        history_langchain_format = ChatMessageHistory()
        for human, ai in history:
            if human is not None:
                history_langchain_format.add_user_message(human)
            history_langchain_format.add_ai_message(ai)
        history_langchain_format.add_user_message(message)
        print(history_langchain_format)
        ai_message = chain.invoke({"messages": history_langchain_format.messages})
        history_langchain_format.add_ai_message(ai_message)
        log_interaction(message, ai_message.content)
        return ai_message.content

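# The chatbot is seeded with question_list[0] as an opening bot message shown before
# the user types anything ([None, bot_text] is Gradio's pair format for a bot-only turn).
# The commented-out line below launches the same interface without that seeded opener.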
gr.ChatInterface(response, chatbot=gr.Chatbot(value=[[None, question_list[0]]])).launch()
#gr.ChatInterface(response).launch()