Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -228,6 +228,7 @@ with gr.Blocks(title="Qwen3 Chat", css=css) as demo:
         lines=2,
         show_label=False
     )
+    send_btn = gr.Button("Send", variant="primary", elem_classes="button-primary")
 
     gr.HTML("""
     <div class="footer">
@@ -242,15 +243,35 @@ with gr.Blocks(title="Qwen3 Chat", css=css) as demo:
     clr.click(fn=lambda: ([], ""), outputs=[chat, txt])
     cnl.click(fn=cancel_generation)
 
+    # Clear the text input after submitting the message
+    def submit_message(msg, history, prompt, model, tok, temp, k, p, rp):
+        return chat_response(
+            msg, history, prompt,
+            get_model_name(model), tok, temp, k, p, rp
+        ), ""
+
     txt.submit(
-        fn=
-        chat_response(
-            msg, history, prompt,
-            get_model_name(model), tok, temp, k, p, rp
-        ),
+        fn=submit_message,
         inputs=[txt, chat, sys_prompt,
                 model_dd, max_tok, temp, k, p, rp],
-        outputs=[chat],
+        outputs=[chat, txt],
+        show_progress=True
+    )
+
+    # Clear the text input after sending the message
+    def send_message(msg, history, prompt, model, tok, temp, k, p, rp):
+        return chat_response(
+            msg, history, prompt,
+            get_model_name(model), tok, temp, k, p, rp
+        ), ""
+
+    send_btn.click(
+        fn=send_message,
+        inputs=[txt, chat, sys_prompt,
+                model_dd, max_tok, temp, k, p, rp],
+        outputs=[chat, txt],
+        show_progress=True
+    )
         show_progress=True
     )
 
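For context, below is a minimal, self-contained sketch of the wiring pattern this commit introduces: the handler returns the updated chat plus an empty string, so listing the textbox in outputs clears it after either pressing Enter or clicking the new Send button. This is an illustration under simplifying assumptions, not the actual app.py: chat_response is stubbed out, and the model dropdown and sampling controls (model_dd, max_tok, temp, k, p, rp) are omitted.

import gradio as gr

def chat_response(msg, history, prompt):
    # Stand-in stub for the real model call (assumption: the actual function
    # also takes the model name and sampling parameters and returns updated history).
    return history + [(msg, f"(echo) {msg}")]

def submit_message(msg, history, prompt):
    # Return the updated chat plus "" so outputs=[chat, txt] also clears the input box.
    return chat_response(msg, history, prompt), ""

with gr.Blocks(title="Qwen3 Chat") as demo:
    chat = gr.Chatbot()
    sys_prompt = gr.Textbox(label="System prompt", value="You are a helpful assistant.")
    txt = gr.Textbox(lines=2, show_label=False)
    send_btn = gr.Button("Send", variant="primary")

    # Enter in the textbox and the Send button run the same handler;
    # both update the chat and clear the textbox via the second output.
    txt.submit(fn=submit_message, inputs=[txt, chat, sys_prompt], outputs=[chat, txt])
    send_btn.click(fn=submit_message, inputs=[txt, chat, sys_prompt], outputs=[chat, txt])

if __name__ == "__main__":
    demo.launch()

Keeping the event handler as a named wrapper (rather than passing chat_response directly to fn=) is what makes it possible to append "" to the return value and reuse the same function for both the submit and click events.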