# app.py — FastAPI inference service for the "Xennus/niko-mistral-cyberbot" model.
# NOTE(review): the lines above this header were Hugging Face file-viewer residue
# ("Update app.py", revision 287b981 verified, 622 Bytes) accidentally pasted into
# the source; converted to this comment so the file parses as Python.
import torch

from fastapi import FastAPI
from pydantic import BaseModel
from transformers import AutoModelForCausalLM, AutoTokenizer
app = FastAPI()

# Hub checkpoint this service exposes; loaded once at import time.
model_name = "Xennus/niko-mistral-cyberbot"

# Load tokenizer and weights up front so requests never pay startup cost.
# .eval() switches the network to inference mode (disables dropout etc.)
# and returns the module itself, so it can be chained onto the load.
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name).eval()
class Request(BaseModel):
    """Request body for POST /generate."""

    # Raw prompt text forwarded verbatim to the tokenizer.
    prompt: str
@app.post("/generate")
async def generate(request: Request):
    """Generate a completion for ``request.prompt``.

    Returns:
        dict: ``{"response": text}`` where *text* is the decoded output
        sequence. Note the decoded sequence includes the prompt itself,
        since the full generated tensor is decoded.
    """
    inputs = tokenizer(request.prompt, return_tensors="pt")
    # Inference only — skip autograd bookkeeping to save memory/time.
    with torch.no_grad():
        # Bug fix: the original max_length=200 counted the prompt tokens
        # too, so a long prompt could yield little or no new text (or be
        # truncated). max_new_tokens bounds only the generated portion.
        outputs = model.generate(**inputs, max_new_tokens=200)
    text = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return {"response": text}