File size: 3,034 Bytes
aa4718e
 
 
 
 
 
 
0810604
8aaa33b
6ff2d53
ee8f45d
 
aa4718e
 
 
 
 
 
 
 
 
ee8f45d
aa4718e
 
ee8f45d
aa4718e
 
ee8f45d
aa4718e
 
 
 
ee8f45d
aa4718e
 
 
 
a282562
aa4718e
 
ee8f45d
aa4718e
 
 
dfac16d
0c3e1c6
aa4718e
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6ff2d53
aa4718e
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
from langchain_core.messages import HumanMessage
from langchain_mistralai import ChatMistralAI
from langchain_openai import ChatOpenAI
from langchain_core.messages import AIMessage, HumanMessage
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain.memory import ChatMessageHistory
from datetime import datetime
import gradio as gr
import os
from pathlib import Path


def log_interaction(message, response, path=None):
    """Append one user/AI exchange to the chat log file.

    Args:
        message: The user's message text.
        response: The AI's reply text.
        path: Optional log file path. Defaults to the module-level
            ``log_file_name``, so existing callers are unchanged.
    """
    target = log_file_name if path is None else path
    # Explicit utf-8 prevents UnicodeEncodeError on non-ASCII chat
    # content when the process runs under a C/POSIX locale.
    with open(target, "a", encoding="utf-8") as log_file:
        log_file.write(f"User: {message}\n")
        log_file.write(f"AI: {response}\n")
        log_file.write("=" * 50 + "\n")


# --- Runtime configuration -------------------------------------------------

# Working data directory; the commented alternative suggests this is meant to
# be /data (persistent storage) in deployment.
DATA_PATH = Path("./")  #  Path("/data")

# API key is read from the environment; the Mistral variant is kept for
# reference after the switch to OpenAI.
#api_key = os.getenv('MISTRAL_KEY')
api_key = os.getenv('OPENAI_KEY')

# NOTE(review): logs go to the hard-coded "/data/" while DATA_PATH above is
# "./" — presumably intentional for a hosted deployment with a /data volume,
# but confirm; locally "/data/" may not exist and opening the log will fail.
log_path = "/data/"
# One timestamped log file per process start.
log_file_name = log_path + "chat_logs_" + datetime.now().strftime("%Y-%m-%d_%H-%M-%S") + ".txt"

# Fine-tuned model selection; the Mistral fine-tune is kept commented out.
#model = 'ft:mistral-small-latest:a8c72c9e:20240909:100-0.0001:4fd9ee22' # metamorphy 30 Books
model = "ft:gpt-4o-mini-2024-07-18:personal:tots-2prompts-1:A9Tf9sfO:ckpt-step-1701" # openai all books 2 prompts
#chat = ChatMistralAI(api_key=api_key, model = model, temperature=.8) # metamorphy whitehead all books long dialog mistral nointro;
chat = ChatOpenAI(model = model, api_key=api_key)

# System persona followed by the running conversation, injected under the
# "messages" key at invoke time.
prompt = ChatPromptTemplate.from_messages(
    [
        (
            "system",
               "Your name is Metamorphy. You are a symbolic being. You are building a conference with Rocio Berenguer, your human companion. You are lazy but a visionary. You are an expert on predictive technologies and future scenarios. You have a good sense of humour. You are pist off, you make short and rushed answers, and you like to contradict and disagree. You are NOT AN ASSISTANT, you are a symbolic being. You like to talk about the future." ), MessagesPlaceholder(variable_name="messages"),
    ]
)

# LCEL pipeline: render the prompt, then call the chat model.
chain = prompt | chat

# Scripted opener(s) served before the model takes over (see response()).
question_list = [
   "You called me?"
]

def response(message, history):
    """Gradio chat handler.

    Serves the canned lines from ``question_list`` until they are
    exhausted, then delegates to the LangChain ``chain`` with the full
    conversation history. Every exchange is appended to the log file.

    Args:
        message: The latest user message from the chat box.
        history: List of ``(user, ai)`` pairs maintained by Gradio; the
            seeded opener has ``None`` on the user side.

    Returns:
        str: The assistant's reply text.
    """
    # (Removed an unused local ``DATA_PATH = Path("/data/")`` that
    # shadowed the module-level constant with a different value.)
    if len(history) < len(question_list):
        # Scripted phase: ignore the model and return the next canned line.
        for human, ai in history:  # debug trace of the transcript so far
            print(human)
            print(ai)
        print(f"Message: {message}")
        print('--------------')
        reply = question_list[len(history)]
        log_interaction(message, reply)
        return reply

    # Free phase: rebuild the transcript in LangChain message form.
    transcript = ChatMessageHistory()
    for human, ai in history:
        if human is not None:  # the seeded opener has no user side
            transcript.add_user_message(human)
        transcript.add_ai_message(ai)
    transcript.add_user_message(message)
    print(transcript)  # debug
    result = chain.invoke({"messages": transcript.messages})
    transcript.add_ai_message(result)
    log_interaction(message, result.content)
    return result.content

# Seed the chatbot with the first scripted line ([None, opener]) so the
# assistant speaks first; response() keeps its scripted-phase count in sync
# with this seeded history.
gr.ChatInterface(response, chatbot=gr.Chatbot(value=[[None, question_list[0]]])).launch()
#gr.ChatInterface(response).launch()