"""Simple Reasoning Agent using local models"""
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch
class ReasoningAgent:
    """A simple reasoning agent using local models."""
def __init__(self, model_name="TinyLlama/TinyLlama-1.1B-Chat-v1.0"): | |
"""Initialize the agent with a local model.""" | |
# Load model and tokenizer | |
self.tokenizer = AutoTokenizer.from_pretrained(model_name) | |
self.model = AutoModelForCausalLM.from_pretrained( | |
model_name, | |
torch_dtype=torch.float16, | |
device_map="auto" | |
) | |
def get_response(self, query: str) -> str: | |
"""Generate a response using the local model.""" | |
try: | |
# Format the prompt | |
prompt = f"""<|system|> | |
You are a helpful AI assistant. | |
</s> | |