Spaces:
Runtime error
Fix trailing new line
- app.py +3 -1
- dialogues.py +1 -1
app.py
CHANGED
@@ -192,13 +192,15 @@ examples = [
 def clear_chat():
     return [], []
 
+
 def delete_last_turn(chat, history):
     if chat and history:
         chat.pop(-1)
         history.pop(-1)
         history.pop(-1)
     return chat, history
-
+
+
 def process_example(args):
     for [x, y] in generate(args):
         pass
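The spacing changes above are purely cosmetic; the touched functions are otherwise unchanged. As a side note, delete_last_turn pops chat once but history twice, which suggests chat holds one (user, assistant) pair per turn while history stores the user message and the assistant reply as separate entries. A minimal sketch under that assumption; the actual data shapes are not confirmed by this diff:

# Hypothetical data shapes, inferred only from the pop() calls in delete_last_turn:
# chat holds one (user, assistant) pair per turn; history holds the same turns
# flattened into alternating user / assistant strings.
chat = [("Hi", "Hello!"), ("Count to three", "1, 2, 3")]
history = ["Hi", "Hello!", "Count to three", "1, 2, 3"]


def delete_last_turn(chat, history):
    if chat and history:
        chat.pop(-1)     # remove the last (user, assistant) pair
        history.pop(-1)  # remove the last assistant reply
        history.pop(-1)  # remove the matching user message
    return chat, history


chat, history = delete_last_turn(chat, history)
# chat    -> [("Hi", "Hello!")]
# history -> ["Hi", "Hello!"]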
dialogues.py
CHANGED
@@ -62,7 +62,7 @@ class DialogueTemplate(ModelHubMixin):
                 prompt += self.user_token + "\n" + message["content"] + self.end_token + "\n"
             else:
                 prompt += self.assistant_token + "\n" + message["content"] + self.end_token + "\n"
-        prompt += self.assistant_token
+        prompt += self.assistant_token + "\n"
         return prompt
 
     def get_dialogue(self):
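The substantive fix is the trailing "\n" appended after the final assistant_token, so the generated completion starts on the line below the assistant marker, matching the token + "\n" pattern used for every other turn. A rough sketch of the effect, using illustrative token values (<|user|>, <|assistant|>, <|end|>) that this diff does not show:

# Illustrative token values; the real ones live on the DialogueTemplate instance.
user_token, assistant_token, end_token = "<|user|>", "<|assistant|>", "<|end|>"
messages = [{"role": "user", "content": "Write a haiku about diffs."}]

prompt = ""
for message in messages:
    if message["role"] == "user":
        prompt += user_token + "\n" + message["content"] + end_token + "\n"
    else:
        prompt += assistant_token + "\n" + message["content"] + end_token + "\n"

# Before the fix the prompt ended with "<|assistant|>"; with it, the prompt ends
# with "<|assistant|>\n", consistent with the newline after every other marker.
prompt += assistant_token + "\n"

print(prompt)
# <|user|>
# Write a haiku about diffs.<|end|>
# <|assistant|>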