Update app.py
app.py CHANGED
@@ -1,48 +1,47 @@
 import openai
 import streamlit as st
 
-
-
+class FineTunedChatbot:
+    def __init__(self, api_key, model_name):
+        openai.api_key = api_key
+        self.client = openai
+        self.model_name = model_name
+
+    def get_response(self, query_text):
+        try:
+            # Use client.chat.completions.create
+            response = self.client.chat.completions.create(
+                model=self.model_name,
+                messages=[
+                    {"role": "system", "content": "You are a German chatbot. You should help the user by answering their question."},
+                    {"role": "user", "content": query_text}
+                ]
+            )
+            # Extract the response content
+            answer = response.choices[0].message.content
+            return {'answer': answer}
+
+        except Exception as e:
+            return {'error': f'Error processing query: {str(e)}'}
 
 # Fine-tuned model name
 MODEL_NAME = "ft:gpt-3.5-turbo-0125:brenin::AjNcIvnw"
 
-# Streamlit
+# Streamlit app UI
 st.title("Chat with Fine-Tuned GPT-3.5 Turbo")
 st.markdown("This chatbot uses a fine-tuned GPT-3.5 Turbo model.")
 
-# Initialize
-
-st.session_state.messages = [
-    {"role": "system", "content": "You are a German chatbot. You should help the user by answering their questions."}
-]
+# Initialize chatbot
+chatbot = FineTunedChatbot(api_key=st.secrets["OPENAI_API_KEY"], model_name=MODEL_NAME)
 
 # User input field
 user_input = st.text_input("Enter your question:")
 
-# If user submits a question
 if user_input:
-    #
-
-
-
-
-
-
-        messages=st.session_state.messages
-    )
-
-    # Extract the assistant's reply
-    assistant_reply = response['choices'][0]['message']['content']
-    st.session_state.messages.append({"role": "assistant", "content": assistant_reply})
-
-    except Exception as e:
-        assistant_reply = f"Error: {str(e)}"
-        st.session_state.messages.append({"role": "assistant", "content": assistant_reply})
-
-    # Display the conversation
-    for message in st.session_state.messages:
-        if message["role"] == "user":
-            st.markdown(f"**User:** {message['content']}")
-        elif message["role"] == "assistant":
-            st.markdown(f"**Assistant:** {message['content']}")
+    # Fetch response from chatbot
+    response = chatbot.get_response(user_input)
+
+    if 'answer' in response:
+        st.markdown(f"**Assistant:** {response['answer']}")
+    else:
+        st.markdown(f"**Error:** {response['error']}")