customstreamer
app.py CHANGED
@@ -154,8 +154,12 @@ class BSIChatbot:
         Question: {query}"""
 
         input_ids = self.llmtokenizer(final_prompt, return_tensors="pt").to(self.llmpipeline.model.device)
-
-
+        streamer=self.streamer
+
+        self.llmpipeline.model.generate(input_ids=input_ids, streamer=streamer, max_new_tokens=500, temperature=0.7, repetition_penalty=1.1)
+
+        for text in streamer:
+            yield text
 
         #input_ids = self.llmtokenizer(final_prompt, return_tensors="pt").to(self.llmpipeline.model.device)
         #streamer = TextIteratorStreamer(self.llmtokenizer, skip_prompt=True)
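For context, the new lines rely on a streamer object (the commented-out line suggests it is a Hugging Face TextIteratorStreamer kept on self.streamer). Below is a minimal, self-contained sketch of that streaming pattern, not the author's exact code: the checkpoint name and the stream_answer helper are placeholders, and generate() is launched on a background thread, which is the usual pattern since it blocks until decoding finishes while the caller iterates the streamer.

from threading import Thread

from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer

# Hypothetical checkpoint; the model actually loaded by BSIChatbot is not shown in this diff.
MODEL_NAME = "meta-llama/Llama-2-7b-chat-hf"

tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME, device_map="auto")


def stream_answer(final_prompt: str):
    """Yield the model's reply piece by piece, mirroring the streaming loop in app.py."""
    inputs = tokenizer(final_prompt, return_tensors="pt").to(model.device)

    # app.py keeps an equivalent object on self.streamer; skip_prompt=True drops the
    # echoed prompt so only newly generated text reaches the caller.
    streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)

    # generate() blocks until the full sequence is produced, so it runs on a
    # background thread while this generator consumes the streamer queue.
    generation_kwargs = dict(
        **inputs,
        streamer=streamer,
        max_new_tokens=500,
        do_sample=True,          # sampling must be enabled for temperature to take effect
        temperature=0.7,
        repetition_penalty=1.1,
    )
    Thread(target=model.generate, kwargs=generation_kwargs).start()

    for text in streamer:
        yield text


# Usage: print the answer as it is produced.
# for chunk in stream_answer("Question: What does BSI stand for?"):
#     print(chunk, end="", flush=True)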