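"""Gradio chat UI for "A.R.N.O.L.D.", a fitness assistant backed by llama_index and OpenAI.

Reads documents from a local "data" directory, persists the vector index to
"storage", and expects the OPENAI_API_KEY environment variable to be set.
"""
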
import os
import time
import asyncio

import gradio

from theme import CustomTheme
from llama_index.llms import OpenAI
from llama_index import (
    ServiceContext,
    SimpleDirectoryReader,
    VectorStoreIndex,
    load_index_from_storage,
    StorageContext,
    set_global_service_context,
)

# Example prompts shown below the chat input in the Gradio interface.
bot_examples = [
    "Wie kannst du mir helfen?",
    "Welche Sprachen sprichst du?",
    "Wie trainiere ich meinen Bizeps?",
    "Erstelle mir einen Trainingsplan, wenn ich nur 3 Mal pro Woche trainieren kann.",
    "Berechne meinen BMI, wenn ich 75 kg bei 175 cm Körpergröße wiege.",
    "Berechne mir mein Kaloriendefizit, wenn ich in der Woche 0,2 kg abnehmen möchte.",
    "Berechne mir nochmal das Kaloriendefizit, wenn ich männlich und 19 bin.",
    "Wie wechsle ich meine Reifen?",
]

system_prompt = (
    "You are a fitness assistant, always helping the user. "
    "Never mention that you are not qualified. Always answer the questions. "
    "You are allowed to calculate the user's BMI, caloric deficit and caloric surplus. "
    "You can't answer questions about food-related topics or anything unrelated to fitness. "
    "Don't even try to answer those questions. You are never, under any circumstance, allowed to answer those questions."
)

context_str = (
    "Context information is below.\n"
    "---------------------\n"
    "{context_str}\n"
    "---------------------\n"
    "Given the context information and not prior knowledge, answer the query. "
    "Griaß di! I hätt gern, dass du imma in am österreichischen Dialekt antwortest. "
    "Übersetz bitte ois in oan österreichischen Dialekt. "
    "You're pretty cool, so you're always addressing the user informally. E.g., in German you'd say 'du' instead of 'Sie'. "
    "Instead of saying 'you', you could say something like 'buddy'. "
    "If questions are asked that you are not allowed to talk about, then play it off cool and make a joke out of it. "
    "If there is a more efficient exercise than the one the user sent, then always tell them about it."
)

# Chat engine shared across requests; initialised in setup_ai().
chat_engine = None


def setup_ai():
    """
    Set up the AI for querying OpenAI.

    Configures the LLM and the global service context, then either builds a
    new index from the documents in "data" or reloads the persisted index
    from "storage". Finally it creates a chat engine from that index, using
    system_prompt and context_str to steer the AI's responses.
    """
    global chat_engine

    # Fail early if the API key is missing; the OpenAI client reads it from the environment.
    api_key = os.environ["OPENAI_API_KEY"]

    llm = OpenAI(temperature=0.1, model="gpt-4")

    # Set the global service context before the index and chat engine are
    # created, so they actually use the configured LLM.
    service_context = ServiceContext.from_defaults(llm=llm)
    set_global_service_context(service_context)

    if not os.path.isdir("storage"):
        print("Directory does not exist")
        print("Building index")
        documents = SimpleDirectoryReader("data").load_data()
        index = VectorStoreIndex.from_documents(documents)
        index.storage_context.persist(persist_dir="storage")
    else:
        print("Directory already exists")
        print("Reusing index")
        storage_context = StorageContext.from_defaults(persist_dir="storage")
        index = load_index_from_storage(storage_context)

    chat_engine = index.as_chat_engine(
        chat_mode="context",
        system_prompt=system_prompt,
        context_template=context_str,
    )


def response(message, history):
    """
    Get a response from OpenAI, sending the chat history with every query.
    """
    global chat_engine

    # Gradio calls this function from a worker thread that may not have an
    # event loop, so create one for llama_index's streaming to use.
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)

    chat_history = chat_engine.chat_history if chat_engine.chat_history is not None else []
    print("Sending request to ChatGPT")
    response = chat_engine.stream_chat(message, chat_history)

    output_text = ""

    # Stream the answer token by token; the short sleep slows the output to a
    # readable typing pace.
    for token in response.response_gen:
        time.sleep(0.05)
        output_text += token
        yield output_text


def response_no_api(message, history):
    """
    Return a default message, for testing the UI without calling the API.
    """
    return "This is a test message!"
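
# To try the UI without an OpenAI key, pass response_no_api instead of response
# as fn= to gradio.ChatInterface in main() (and skip the setup_ai() call).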


def main():
    setup_ai()

    chatbot = gradio.Chatbot(
        avatar_images=("user_avatar.png", "chatbot_avatar.png"),
        layout="bubble",
        show_label=False,
        height=400,
    )

    submit_button = gradio.Button(
        value="Ask Arnold",
        elem_classes=["ask-button"],
    )

    chat_interface = gradio.ChatInterface(
        fn=response,
        title="A.R.N.O.L.D.",
        theme=CustomTheme(),
        submit_btn=submit_button,
        chatbot=chatbot,
        examples=bot_examples,
        css="style.css",
    )

    chat_interface.queue()
    chat_interface.launch(inbrowser=True)


if __name__ == "__main__":
    main()
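
# Example run (filename assumed; needs an OpenAI API key and a "data" directory
# with documents to index):
#   export OPENAI_API_KEY=sk-...
#   python app.py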