SkyNetWalker
committed
Update app.py
app.py
CHANGED
@@ -61,7 +61,7 @@ def respond(
 
 print("Completed response generation.")
 
-chatbot = gr.Chatbot(height=
+chatbot = gr.Chatbot(height=200)
 
 print("Chatbot interface created.")
 
@@ -72,7 +72,7 @@ models = [
     "Qwen/Qwen2.5-Coder-32B-Instruct", #OK
     "meta-llama/Llama-3.2-3B-Instruct", #OK
     #"Qwen/Qwen2.5-32B-Instruct", #fail, too large
-
+    "microsoft/Phi-3-mini-128k-instruct", #fail
     #"microsoft/Phi-3-medium-128k-instruct", #fail
     #"microsoft/phi-4", #fail, too large to be loaded automatically (29GB > 10GB)
     #"meta-llama/Llama-3.3-70B-Instruct", #fail, need HF Pro subscription
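
For context, the sketch below shows how the two changed pieces, the gr.Chatbot(height=200) display and the newly enabled "microsoft/Phi-3-mini-128k-instruct" entry in models, could plug into a Space built on the usual gr.ChatInterface / huggingface_hub.InferenceClient chat template. Only respond, models, and chatbot are visible in the diff; every other name, default value, and slider range here is an illustrative assumption, not the actual contents of app.py.

# Minimal sketch, assuming the standard HF Spaces chat template; not the real app.py.
import gradio as gr
from huggingface_hub import InferenceClient

# Models the Space currently lists as usable; the commented-out entries in the
# real app.py record why the larger ones fail (size limits, Pro-only access).
models = [
    "Qwen/Qwen2.5-Coder-32B-Instruct",
    "meta-llama/Llama-3.2-3B-Instruct",
    "microsoft/Phi-3-mini-128k-instruct",
]

def respond(message, history, model_name, system_message, max_tokens, temperature, top_p):
    # Build an OpenAI-style message list from the tuple-format chat history.
    client = InferenceClient(model_name)
    messages = [{"role": "system", "content": system_message}]
    for user_msg, bot_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if bot_msg:
            messages.append({"role": "assistant", "content": bot_msg})
    messages.append({"role": "user", "content": message})

    # Stream tokens back to the UI as they arrive.
    response = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        response += chunk.choices[0].delta.content or ""
        yield response
    print("Completed response generation.")

# The commit pins the chat display to a fixed 200 px height.
chatbot = gr.Chatbot(height=200)
print("Chatbot interface created.")

demo = gr.ChatInterface(
    respond,
    chatbot=chatbot,
    additional_inputs=[
        gr.Dropdown(choices=models, value=models[0], label="Model"),
        gr.Textbox(value="You are a helpful assistant.", label="System message"),
        gr.Slider(1, 4096, value=512, step=1, label="Max new tokens"),
        gr.Slider(0.1, 2.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(0.1, 1.0, value=0.95, step=0.05, label="Top-p"),
    ],
)

if __name__ == "__main__":
    demo.launch()

The height=200 change only affects how much vertical space the conversation area takes up in the rendered page; the edit to the models list is what actually makes Phi-3-mini selectable.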