Updated smolagents
app.py
CHANGED
@@ -1,4 +1,4 @@
-from smolagents import CodeAgent,
+from smolagents import CodeAgent,WebSearchTool, InferenceClientModel,load_tool,tool
 import datetime
 import requests
 import pytz
@@ -38,7 +38,7 @@ final_answer = FinalAnswerTool()
 # If the agent does not answer, the model is overloaded, please use another model or the following Hugging Face Endpoint that also contains qwen2.5 coder:
 # model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'
 
-model =
+model = InferenceClientModel(
     max_tokens=4192,
     temperature=0.5,
     model_id='meta-llama/Llama-4-Maverick-17B-128E-Instruct',# it is possible that this model may be overloaded
@@ -55,7 +55,7 @@ image_generation_tool = load_tool(
 )
 
 # Web search tool
-search_tool=
+search_tool=WebSearchTool()
 
 with open("prompts.yaml", 'r') as stream:
     prompt_templates = yaml.safe_load(stream)
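For context, a minimal sketch of how the updated pieces fit together at runtime. The model and search_tool construction and the prompts.yaml loading come straight from the diff; the CodeAgent wiring at the end is an assumption based on the usual smolagents app.py layout and is not shown in this change.

from smolagents import CodeAgent, WebSearchTool, InferenceClientModel
import yaml

# Serverless inference model, as configured in the diff
model = InferenceClientModel(
    max_tokens=4192,
    temperature=0.5,
    model_id='meta-llama/Llama-4-Maverick-17B-128E-Instruct',  # may be overloaded at times
)

# Built-in smolagents web search tool added by the diff
search_tool = WebSearchTool()

# Prompt templates shipped alongside app.py
with open("prompts.yaml", 'r') as stream:
    prompt_templates = yaml.safe_load(stream)

# Assumed wiring (not part of this diff): hand the model, the search tool,
# and the loaded prompt templates to the agent
agent = CodeAgent(
    tools=[search_tool],
    model=model,
    prompt_templates=prompt_templates,
)

Assuming that wiring, agent.run("your question") lets the model issue web searches through search_tool while it generates and executes code.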