Update app.py
app.py CHANGED
@@ -38,10 +38,15 @@ def respond(
         n_batch=1024,
         n_ctx=8192,
     )
-
     provider = LlamaCppPythonProvider(llm)
 
-
+    agent = LlamaCppAgent(
+        provider,
+        system_prompt=f"{system_message}",
+        predefined_messages_formatter_type=chat_template,
+        debug_output=True
+    )
+
     settings = provider.get_provider_default_settings()
     settings.temperature = temperature
     settings.top_k = top_k
@@ -50,34 +55,32 @@ def respond(
     settings.repeat_penalty = repeat_penalty
     settings.stream = True
 
-    # Prepare chat history
     messages = BasicChatHistory()
 
     for msn in history:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        yield f"Error: {str(e)}"
+        user = {
+            'role': Roles.user,
+            'content': msn[0]
+        }
+        assistant = {
+            'role': Roles.assistant,
+            'content': msn[1]
+        }
+        messages.add_message(user)
+        messages.add_message(assistant)
+
+    stream = agent.get_chat_response(
+        message,
+        llm_sampling_settings=settings,
+        chat_history=messages,
+        returns_streaming_generator=True,
+        print_output=False
+    )
+
+    outputs = ""
+    for output in stream:
+        outputs += output
+        yield outputs
 
 
 def create_interface(model_name):
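For context, a minimal sketch of how the changed respond() reads end to end. The import paths follow the llama-cpp-agent package layout; the function signature, default sampling values, model path, the settings.top_p line (it falls in the hidden lines between the two hunks), and the Gradio-style (user, assistant) history pairs are assumptions, not part of this commit.

from llama_cpp import Llama
from llama_cpp_agent import LlamaCppAgent, MessagesFormatterType
from llama_cpp_agent.providers import LlamaCppPythonProvider
from llama_cpp_agent.chat_history import BasicChatHistory
from llama_cpp_agent.chat_history.messages import Roles


def respond(message, history, system_message, temperature=0.7, top_k=40,
            top_p=0.95, repeat_penalty=1.1,
            chat_template=MessagesFormatterType.CHATML):
    # Hypothetical model path; the real one is set elsewhere in app.py.
    llm = Llama(
        model_path="model.gguf",
        n_batch=1024,
        n_ctx=8192,
    )
    provider = LlamaCppPythonProvider(llm)

    # The commit's main addition: build the agent before configuring sampling.
    agent = LlamaCppAgent(
        provider,
        system_prompt=f"{system_message}",
        predefined_messages_formatter_type=chat_template,
        debug_output=True,
    )

    settings = provider.get_provider_default_settings()
    settings.temperature = temperature
    settings.top_k = top_k
    settings.top_p = top_p  # assumed; this line sits between the diff hunks
    settings.repeat_penalty = repeat_penalty
    settings.stream = True

    # Rebuild the agent's chat history from (user, assistant) pairs.
    messages = BasicChatHistory()
    for msn in history:
        messages.add_message({'role': Roles.user, 'content': msn[0]})
        messages.add_message({'role': Roles.assistant, 'content': msn[1]})

    # Stream the response and yield the accumulated text after each chunk.
    stream = agent.get_chat_response(
        message,
        llm_sampling_settings=settings,
        chat_history=messages,
        returns_streaming_generator=True,
        print_output=False,
    )
    outputs = ""
    for output in stream:
        outputs += output
        yield outputs

Yielding the growing outputs string rather than each chunk matches what a Gradio ChatInterface generator expects: every yield replaces the displayed message, so the reply appears to type itself out.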