Update app.py

app.py CHANGED
@@ -54,13 +54,10 @@ css = """
 }
 """
 
-# <span style="display: inline-flex; align-items: center; border-radius: 0.375rem; background-color: rgba(79, 70, 229, 0.1); padding: 0.1rem 0.75rem; font-size: 0.75rem; font-weight: 500; color: #60a5fa; margin-top: 2.5px;">
-# Meta Llama 3 8B Instruct
-# </span>
 PLACEHOLDER = """
 <div class="message-bubble-border" style="display:flex; max-width: 600px; border-width: 1px; border-radius: 8px; box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1); backdrop-filter: blur(10px);">
     <figure style="margin: 0;">
-        <img src="https://
+        <img src="https://huggingface.co/spaces/poscye/ddg-web-search-chat/resolve/main/logo.jpg" alt="Logo" style="width: 100%; height: 100%; border-radius: 8px;">
     </figure>
     <div style="padding: .5rem 1.5rem;">
         <h2 style="text-align: left; font-size: 1.5rem; font-weight: 700; margin-bottom: 0.5rem;">llama-cpp-agent</h2>
@@ -70,6 +67,9 @@ PLACEHOLDER = """
         <span style="display: inline-flex; align-items: center; border-radius: 0.375rem; background-color: rgba(229, 70, 77, 0.1); padding: 0.1rem 0.75rem; font-size: 0.75rem; font-weight: 500; color: #f88181; margin-bottom: 2.5px;">
             Mistral 7B Instruct v0.3
         </span>
+        <span style="display: inline-flex; align-items: center; border-radius: 0.375rem; background-color: rgba(79, 70, 229, 0.1); padding: 0.1rem 0.75rem; font-size: 0.75rem; font-weight: 500; color: #60a5fa; margin-top: 2.5px;">
+            Meta Llama 3 8B Instruct
+        </span>
     </div>
     <div style="display: flex; justify-content: flex-end; align-items: center;">
         <a href="https://discord.gg/sRMvWKrh" target="_blank" rel="noreferrer" style="padding: .5rem;">
@@ -122,6 +122,7 @@ def write_message_to_user():
 def respond(
     message,
     history: list[tuple[str, str]],
+    model,
     system_message,
     max_tokens,
     temperature,
@@ -131,7 +132,7 @@ def respond(
 ):
     chat_template = get_messages_formatter_type(model)
     llm = Llama(
-        model_path=f"models/{
+        model_path=f"models/{model}",
        flash_attn=True,
        n_threads=40,
        n_gpu_layers=81,
@@ -229,13 +230,13 @@ def respond(
 demo = gr.ChatInterface(
     respond,
     additional_inputs=[
-
-
-
-
-
-
-
+        gr.Dropdown([
+                'Mistral-7B-Instruct-v0.3-Q6_K.gguf',
+                'Meta-Llama-3-8B-Instruct-Q6_K.gguf'
+            ],
+            value="Mistral-7B-Instruct-v0.3-Q6_K.gguf",
+            label="Model"
+        ),
         gr.Textbox(value=web_search_system_prompt, label="System message"),
         gr.Slider(minimum=1, maximum=4096, value=2048, step=1, label="Max tokens"),
         gr.Slider(minimum=0.1, maximum=1.0, value=0.45, step=0.1, label="Temperature"),
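The commit wires a model selector through Gradio: gr.ChatInterface passes the current value of each widget in additional_inputs to the chat function as an extra positional argument, in the order listed, so the new Dropdown value arrives as the added model parameter and ends up in model_path=f"models/{model}". The sketch below is a minimal, self-contained illustration of that wiring only. It is not the Space's full app.py: the llama-cpp-agent / web-search pipeline (get_messages_formatter_type, web_search_system_prompt) is replaced by a plain llama-cpp-python create_chat_completion call, and the system-message default is a placeholder.

# Minimal sketch of the wiring introduced by this commit (simplified; the real app
# routes generation through llama-cpp-agent and a DuckDuckGo web-search tool).
import gradio as gr
from llama_cpp import Llama

MODELS = [
    "Mistral-7B-Instruct-v0.3-Q6_K.gguf",
    "Meta-Llama-3-8B-Instruct-Q6_K.gguf",
]


def respond(message, history, model, system_message, max_tokens, temperature):
    # `model` is the Dropdown's current value; Gradio appends the additional_inputs
    # values after (message, history) in the same order they are declared below.
    llm = Llama(
        model_path=f"models/{model}",  # GGUF files are expected under models/
        flash_attn=True,
        n_threads=40,
        n_gpu_layers=81,
    )
    result = llm.create_chat_completion(
        messages=[
            {"role": "system", "content": system_message},
            {"role": "user", "content": message},
        ],
        max_tokens=max_tokens,
        temperature=temperature,
    )
    return result["choices"][0]["message"]["content"]


demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Dropdown(MODELS, value=MODELS[0], label="Model"),
        gr.Textbox(value="You are a helpful assistant.", label="System message"),
        gr.Slider(minimum=1, maximum=4096, value=2048, step=1, label="Max tokens"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.45, step=0.1, label="Temperature"),
    ],
)

if __name__ == "__main__":
    demo.launch()

Because the mapping is positional, the Dropdown must sit first in additional_inputs to match the respond(message, history, model, ...) signature; reordering either side without the other would feed the wrong values into the wrong parameters.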