File size: 569 Bytes
5d60b9f
 
 
 
 
 
 
 
15913c3
 
5d60b9f
 
 
15913c3
5d60b9f
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
"""Demo: generate an Arabic chat completion with the SILMA 9B instruct model.

Feeds one user message through the Hugging Face text-generation pipeline
and prints the assistant's reply.
"""
import torch
from transformers import pipeline

# bfloat16 halves memory vs. fp32 with minimal quality loss on this model.
# Swap device to "mps" to run on Apple silicon instead of CUDA.
generator = pipeline(
    "text-generation",
    model="silma-ai/SILMA-9B-Instruct-v1.0",
    model_kwargs={"torch_dtype": torch.bfloat16},
    device="cuda",
)

# Chat-format input: a single user turn asking (in Arabic) for a sick-leave
# apology note to a manager.
chat = [
    {"role": "user", "content": "اكتب رسالة تعتذر فيها لمديري في العمل عن الحضور اليوم لأسباب مرضية."},
]

generation = generator(chat, max_new_tokens=256)

# The pipeline echoes the whole conversation; the final turn is the model's
# reply, so take the last message's content.
reply = generation[0]["generated_text"][-1]["content"].strip()
print(reply)