Update app.py
app.py CHANGED
@@ -246,6 +246,7 @@ app = gr.Interface(
 
 ####
 from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig, TextIteratorStreamer
+from threading import Thread
 
 model_id = "philschmid/instruct-igel-001"
 model = AutoModelForCausalLM.from_pretrained(model_id, low_cpu_mem_usage=True)
@@ -335,7 +336,7 @@ def generate(instruction, temperature=1.0, max_new_tokens=256, top_p=0.9, length
 ###
 classifier = pipeline("zero-shot-classification")
 text = "This is a tutorial about Hugging Face."
-candidate_labels = ["
+candidate_labels = ["informieren", "kaufen", "beschweren", "verkaufen"]
 
 def topic_sale_inform (text):
     res = classifier(text, candidate_labels)
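The `Thread` import added in the first hunk pairs with the `TextIteratorStreamer` already imported on the line above it: `model.generate` blocks until generation finishes, so streaming apps run it on a worker thread and read decoded text from the streamer as it arrives. A minimal sketch of that pattern, reusing the model ID from the diff; the helper name `stream_generate`, the prompt handling, and the generation settings are illustrative assumptions, not the app's actual code:

from threading import Thread

from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer

model_id = "philschmid/instruct-igel-001"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, low_cpu_mem_usage=True)

def stream_generate(prompt, max_new_tokens=256):
    # Illustrative helper, not part of the commit.
    inputs = tokenizer(prompt, return_tensors="pt")
    streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
    # generate() blocks until the sequence is complete, so it runs on a worker
    # thread while the caller consumes partial text from the streamer.
    thread = Thread(
        target=model.generate,
        kwargs={**inputs, "streamer": streamer, "max_new_tokens": max_new_tokens},
    )
    thread.start()
    for new_text in streamer:
        yield new_text
    thread.join()

In a Gradio handler, each yielded chunk can be appended to the running output string to update the UI incrementally.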
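The second hunk completes the previously truncated `candidate_labels` line with four German intent labels ("inform", "buy", "complain", "sell") for the zero-shot classifier. A short sketch of how `topic_sale_inform` might be used once that list is defined; the return-value handling and the example call are assumptions, since the rest of the function lies outside the hunk:

from transformers import pipeline

classifier = pipeline("zero-shot-classification")
candidate_labels = ["informieren", "kaufen", "beschweren", "verkaufen"]

def topic_sale_inform(text):
    # Rank the candidate intents for the given text.
    res = classifier(text, candidate_labels)
    # Assumed post-processing: pair each label with its score.
    return dict(zip(res["labels"], res["scores"]))

print(topic_sale_inform("This is a tutorial about Hugging Face."))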