Hugging Face Space — status: Runtime error
Commit: "Update app.py" (Browse files)
File changed: app.py
|
@@ -29,7 +29,7 @@ def search(query):
 29      with requests.Session() as session:
 30          resp = session.get(
 31              url="https://www.google.com/search",
 32 -            headers={"User-Agent": "Mozilla/5.0"},
 33              params={"q": term, "num": 3, "udm": 14},
 34              timeout=5,
 35              verify=None,
|
|
@@ -41,7 +41,7 @@ def search(query):
 41              link = result.find("a", href=True)
 42              link = link["href"]
 43              try:
 44 -                webpage = session.get(link, headers={"User-Agent": "Mozilla/5.0"}, timeout=5, verify=False)
 45                  webpage.raise_for_status()
 46                  visible_text = extract_text_from_webpage(webpage.text)
 47                  if len(visible_text) > max_chars_per_page:
|
|
@@ -52,7 +52,7 @@ def search(query):
 52      return all_results
 53
 54  # Initialize inference clients for different models
 55 -
 56  client_mixtral = InferenceClient("NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO")
 57  client_llama = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct")
 58
|
|
@@ -87,7 +87,7 @@ def respond(message, history):
 87      messages.append({"role": "user", "content": f'[SYSTEM]You are a helpful assistant. You have access to the following functions: \n {str(functions_metadata)}\n\nTo use these functions respond with:\n<functioncall> {{ "name": "function_name", "arguments": {{ "arg_1": "value_1", "arg_1": "value_1", ... }} }} </functioncall> [USER] {message} {vqa}'})
 88
 89      # Call the LLM for response generation
 90 -    response =
 91      response = str(response)
 92      try:
 93          response = response[int(response.find("{")):int(response.index("</"))]
|
|
|
|
After (hunk @@ -29,7 +29,7 @@, def search(query)):
 29      with requests.Session() as session:
 30          resp = session.get(
 31              url="https://www.google.com/search",
 32 +            headers={"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/111.0"},
 33              params={"q": term, "num": 3, "udm": 14},
 34              timeout=5,
 35              verify=None,
|
|
|
|
After (hunk @@ -41,7 +41,7 @@, def search(query)):
 41              link = result.find("a", href=True)
 42              link = link["href"]
 43              try:
 44 +                webpage = session.get(link, headers={"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/111.0"}, timeout=5, verify=False)
 45                  webpage.raise_for_status()
 46                  visible_text = extract_text_from_webpage(webpage.text)
 47                  if len(visible_text) > max_chars_per_page:
|
|
|
|
After (hunk @@ -52,7 +52,7 @@, def search(query)):
 52      return all_results
 53
 54  # Initialize inference clients for different models
 55 +client_gemma = InferenceClient("google/gemma-1.1-7b-it")
 56  client_mixtral = InferenceClient("NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO")
 57  client_llama = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct")
 58
|
|
|
|
After (hunk @@ -87,7 +87,7 @@, def respond(message, history)):
 87      messages.append({"role": "user", "content": f'[SYSTEM]You are a helpful assistant. You have access to the following functions: \n {str(functions_metadata)}\n\nTo use these functions respond with:\n<functioncall> {{ "name": "function_name", "arguments": {{ "arg_1": "value_1", "arg_1": "value_1", ... }} }} </functioncall> [USER] {message} {vqa}'})
 88
 89      # Call the LLM for response generation
 90 +    response = client_gemma.chat_completion(messages, max_tokens=150)
 91      response = str(response)
 92      try:
 93          response = response[int(response.find("{")):int(response.index("</"))]
|