vishalkatheriya committed on
Commit 7e1285f · verified · 1 Parent(s): 92a4db3

Update app.py

Files changed (1)
  1. app.py +11 -3
app.py CHANGED
@@ -51,9 +51,16 @@ def chat_with_llm(query):
 def process_query_with_llm(query):
     prompt = f"User asked: '{query}'. What would be the best search query to use?"
 
-    # Generate response using text_generation without max_length
-    response = client.text_generation(prompt)  # Removed max_length and num_return_sequences
-    return response[0]['generated_text'].strip() if response else "No query generated."
+    # Generate response using text_generation without assuming the structure of the output
+    response = client.text_generation(prompt)
+
+    # Ensure response is in string format and handle errors
+    if isinstance(response, str):
+        return response.strip()
+    elif isinstance(response, list) and 'generated_text' in response[0]:
+        return response[0]['generated_text'].strip()
+    else:
+        return "No query generated."
 
 # Function to perform a Google search using the googlesearch-python package
 def search_web(query):
@@ -104,6 +111,7 @@ if user_input:
     response = chat_with_llm(user_input)
 
 
+
 # import streamlit as st
 # from huggingface_hub import InferenceClient
 # from googlesearch import search
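For context, here is a minimal, self-contained sketch of the updated function as it would sit in app.py, assuming `client` is a `huggingface_hub.InferenceClient`. The model name and the example query are placeholders for illustration, not values taken from this commit.

```python
# Minimal sketch of the updated handler. Assumes `client` is a
# huggingface_hub.InferenceClient; the model name below is a placeholder,
# not necessarily the one used in this Space.
from huggingface_hub import InferenceClient

client = InferenceClient(model="HuggingFaceH4/zephyr-7b-beta")  # placeholder model


def process_query_with_llm(query):
    prompt = f"User asked: '{query}'. What would be the best search query to use?"

    # text_generation typically returns a plain string; the extra branches
    # guard against list-of-dict shaped outputs.
    response = client.text_generation(prompt)

    if isinstance(response, str):
        return response.strip()
    elif isinstance(response, list) and response and 'generated_text' in response[0]:
        return response[0]['generated_text'].strip()
    else:
        return "No query generated."


# Example usage (hypothetical query):
# print(process_query_with_llm("latest budget laptops"))
```

The `isinstance` checks are the point of the change: the previous version always indexed into `response[0]['generated_text']`, which fails when the client returns a bare string, whereas the new branches accept either shape and fall back to a default message.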