Update app.py
app.py
CHANGED
@@ -12,13 +12,13 @@ def load_model():
 # Load the model once
 client = load_model()
 
-# Define prompt templates with
+# Define prompt templates with the assistant's new persona
 def create_prompt(user_message):
     return f"""
-    You are
+    You are Katheriya, a skilled data scientist who helps users find the best information from around the globe. You are highly knowledgeable and provide insightful, detailed responses.
 
     User: {user_message}
-
+    Katheriya:
     """
 
 # Function to process the query using the open-source LLM for general chat
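A minimal usage sketch of the new template (not part of the commit): create_prompt wraps a single user message in the Katheriya persona and ends the prompt with a bare "Katheriya:" line, so the model's completion is expected to continue in that voice.

rendered = create_prompt("Which open datasets track global temperatures?")
# rendered now contains the persona preamble, a "User: ..." line with the
# message above, and a trailing "Katheriya:" line for the model to complete.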
@@ -42,7 +42,7 @@ def chat_with_llm(query):
             if 'choices' in message and message['choices']:
                 delta_content = message['choices'][0]['delta'].get('content', '')
                 response_text += delta_content
-                response_container.write(response_text)  # Update response in real-time
+                response_container.write(f"**Katheriya:** {response_text}")  # Update response in real-time
         return response_text
     except Exception as e:
         st.error(f"An error occurred: {e}")
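The changed line above sits inside a streaming loop that reads OpenAI-style chunks (choices[0].delta.content). Below is a minimal sketch of how such a loop is typically wired up with huggingface_hub's InferenceClient.chat_completion(stream=True); the response_container = st.empty() placeholder and the message format are assumptions, since the surrounding lines are not part of this hunk, and recent huggingface_hub versions yield objects (attribute access) rather than plain dicts.

def chat_with_llm_sketch(query):
    response_container = st.empty()  # assumed placeholder for in-place updates
    response_text = ""
    try:
        # stream=True yields incremental chunks shaped like OpenAI chat deltas
        for message in client.chat_completion(
            messages=[{"role": "user", "content": create_prompt(query)}],
            stream=True,
        ):
            delta_content = message.choices[0].delta.content or ""
            response_text += delta_content
            response_container.write(f"**Katheriya:** {response_text}")
        return response_text
    except Exception as e:
        st.error(f"An error occurred: {e}")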
@@ -50,18 +50,24 @@ def chat_with_llm(query):
 # Function to process the query for search intent
 def process_query_with_llm(query):
     prompt = f"User asked: '{query}'. What would be the best search query to use?"
-
-
+
+    # Generate response using text_generation without max_length
+    response = client.text_generation(prompt)  # Removed max_length and num_return_sequences
+    return response[0]['generated_text'].strip() if response else "No query generated."
 
 # Function to perform a Google search using the googlesearch-python package
 def search_web(query):
-
-
-
-
+    try:
+        search_results = []
+        for result in search(query, num_results=10):
+            search_results.append(result)
+        return search_results
+    except Exception as e:
+        st.error(f"An error occurred during web search: {e}")
+        return []
 
 # Streamlit UI
-st.title("Interactive Chatbot")
+st.title("Interactive Chatbot - Powered by Katheriya")
 
 # Input field for user query
 user_input = st.text_input("You:", "")
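One caveat with the new process_query_with_llm: InferenceClient.text_generation returns a plain string when details and stream are left at their defaults, so indexing the result as response[0]['generated_text'] (the transformers pipeline convention) would likely fail at runtime. A hedged sketch of the same function with the return adjusted; max_new_tokens is an optional cap that is not in the commit.

def process_query_with_llm_sketch(query):
    prompt = f"User asked: '{query}'. What would be the best search query to use?"
    # text_generation returns a string by default, so strip it directly
    response = client.text_generation(prompt, max_new_tokens=64)
    return response.strip() if response else "No query generated."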
@@ -95,9 +101,8 @@ if user_input:
         st.write("Sorry, I couldn't find any relevant links.")
     else:
         # Handle general conversation with response streaming
-        st.write("**Chatbot is typing...**")
         response = chat_with_llm(user_input)
-
+
 
 # import streamlit as st
 # from huggingface_hub import InferenceClient
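The hunk above ends on commented-out imports, and lines 1-11 of app.py are outside this diff, so the actual import block is not visible. The new search_web body does, however, rely on googlesearch-python's search(query, num_results=...) generator, which needs an import along these lines (an assumption, not shown in the commit).

import streamlit as st
from huggingface_hub import InferenceClient
from googlesearch import search  # provided by the googlesearch-python package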