import gradio as gr
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, StoppingCriteria, StoppingCriteriaList
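# Load the stable-code-3b tokenizer and model; torch_dtype="auto" uses the checkpoint's native precision.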
tokenizer = AutoTokenizer.from_pretrained("stabilityai/stable-code-3b", trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    "stabilityai/stable-code-3b",
    trust_remote_code=True,
    torch_dtype="auto"
)
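# Custom stopping criterion: halt generation once the last generated token is one of the stop ids (0 and 2 here).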
class StopOnTokens(StoppingCriteria):
    def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor, **kwargs) -> bool:
        stop_ids = [0, 2]
        for stop_id in stop_ids:
            if input_ids[0][-1] == stop_id:
                return True
        return False
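# Gradio callback: generate a completion for the user message and append the (message, response) pair to the chat history.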
def chat(message, history):
    stop = StopOnTokens()
    history = history or []
    inputs = tokenizer(message, return_tensors="pt").to(model.device)
    print('generate')
    tokens = model.generate(
        **inputs,
        max_new_tokens=4096,
        temperature=0.2,
        do_sample=True,
        stopping_criteria=StoppingCriteriaList([stop]),
    )
    print('decode')
    # Decode only the newly generated tokens so the reply does not echo the prompt.
    response = tokenizer.decode(tokens[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True)
    history.append((message, response))
    return history, history
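# Simple text-in / chatbot-out interface; the "state" components carry the conversation history between calls.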
iface = gr.Interface(
    chat,
    ["text", "state"],
    ["chatbot", "state"],
    allow_flagging="never"
)
iface.launch()