Spaces:
Running
Running
SkyNetWalker
committed on
Update app.py
Browse files
app.py
CHANGED
@@ -27,7 +27,7 @@ def respond(
|
|
27 |
):
|
28 |
print(f"Received message: {message}")
|
29 |
print(f"History: {history}")
|
30 |
-
print(f"
|
31 |
print(f"Max tokens: {max_tokens}, Temperature: {temperature}, Top-P: {top_p}")
|
32 |
print(f"Selected model: {model_name}")
|
33 |
|
@@ -83,15 +83,15 @@ with gr.Blocks() as demo:
|
|
83 |
gr.Markdown("# LLM Test (HF API)") # Add a title to the top of the UI
|
84 |
|
85 |
# Add the model dropdown above the chatbot
|
86 |
-
model_dropdown = gr.Dropdown(choices=models, value=models[0], label="Select Model")
|
87 |
|
88 |
# Use the existing ChatInterface
|
89 |
gr.ChatInterface(
|
90 |
respond,
|
91 |
additional_inputs=[
|
92 |
-
gr.Textbox(value="", label="System message"),
|
93 |
-
gr.Slider(minimum=1, maximum=4096, value=1024, step=1, label="Max new tokens"),
|
94 |
-
gr.Slider(minimum=0.1, maximum=1.0, value=0.3, step=0.1, label="Temperature"),
|
95 |
gr.Slider(
|
96 |
minimum=0.1,
|
97 |
maximum=1.0,
|
|
|
27 |
):
|
28 |
print(f"Received message: {message}")
|
29 |
print(f"History: {history}")
|
30 |
+
print(f"System message: {system_message}")
|
31 |
print(f"Max tokens: {max_tokens}, Temperature: {temperature}, Top-P: {top_p}")
|
32 |
print(f"Selected model: {model_name}")
|
33 |
|
|
|
83 |
gr.Markdown("# LLM Test (HF API)") # Add a title to the top of the UI
|
84 |
|
85 |
# Add the model dropdown above the chatbot
|
86 |
+
model_dropdown = gr.Dropdown(choices=models, value=models[0], label="Select Model:")
|
87 |
|
88 |
# Use the existing ChatInterface
|
89 |
gr.ChatInterface(
|
90 |
respond,
|
91 |
additional_inputs=[
|
92 |
+
gr.Textbox(value="", label="Additional System Prompt:"),
|
93 |
+
gr.Slider(minimum=1, maximum=4096, value=1024, step=1, label="Max new tokens:"),
|
94 |
+
gr.Slider(minimum=0.1, maximum=1.0, value=0.3, step=0.1, label="Temperature:"),
|
95 |
gr.Slider(
|
96 |
minimum=0.1,
|
97 |
maximum=1.0,
|