Nymbo committed · verified
Commit b0cbd1c · 1 Parent(s): 69de3d2

Update app.py

Files changed (1):
  app.py  +46 -93
app.py CHANGED
@@ -23,15 +23,7 @@ def respond(
     seed,
     custom_model
 ):
-    """
-    This function handles the conversation logic and streams the response.
 
-    Arguments:
-    - message: The new user message
-    - history: Chat history in the form of a list of (user_message, assistant_message) pairs
-    - system_message: The system prompt specifying how the assistant should behave
-    - max_tokens, temperature, top_p, frequency_penalty, seed, custom_model: Various parameters for text generation
-    """
     print(f"Received message: {message}")
     print(f"History: {history}")
     print(f"System message: {system_message}")
@@ -43,7 +35,6 @@ def respond(
     if seed == -1:
         seed = None
 
-    # Create the base system-level message
     messages = [{"role": "system", "content": system_message}]
     print("Initial messages array constructed.")
 
@@ -70,7 +61,6 @@ def respond(
     response = ""
     print("Sending request to OpenAI API.")
 
-    # Stream tokens from the HF inference endpoint
     for message_chunk in client.chat.completions.create(
         model=model_to_use,
         max_tokens=max_tokens,
@@ -88,29 +78,13 @@ def respond(
 
     print("Completed response generation.")
 
+# GRADIO UI
 
-# -------------------------
-# Gradio UI definitions
-# -------------------------
-
-# Chatbot interface
-chatbot = gr.Chatbot(
-    height=600,
-    show_copy_button=True,
-    placeholder="Select a model and begin chatting",
-    likeable=True,
-    layout="panel"
-)
+chatbot = gr.Chatbot(height=600, show_copy_button=True, placeholder="Select a model and begin chatting", likeable=True, layout="panel")
 print("Chatbot interface created.")
 
-# System prompt textbox
-system_message_box = gr.Textbox(
-    value="",
-    placeholder="You are a helpful assistant.",
-    label="System Prompt"
-)
+system_message_box = gr.Textbox(value="", placeholder="You are a helpful assistant.", label="System Prompt")
 
-# Sliders
 max_tokens_slider = gr.Slider(
     minimum=1,
     maximum=4096,
@@ -147,9 +121,7 @@ seed_slider = gr.Slider(
     label="Seed (-1 for random)"
 )
 
-# This textbox is what the respond() function sees as "custom_model"
-# We will visually place it inside the Model Selection accordion (below),
-# but we define it here so it can be passed to the ChatInterface.
+# Move the custom_model_box definition to be used inside the accordion
 custom_model_box = gr.Textbox(
     value="",
     label="Custom Model",
@@ -157,7 +129,14 @@ custom_model_box = gr.Textbox(
     placeholder="meta-llama/Llama-3.3-70B-Instruct"
 )
 
-# Create the ChatInterface, referencing the respond function and including all inputs
+def set_custom_model_from_radio(selected):
+    """
+    This function will get triggered whenever someone picks a model from the 'Featured Models' radio.
+    We will update the Custom Model text box with that selection automatically.
+    """
+    print(f"Featured model selected: {selected}")
+    return selected
+
 demo = gr.ChatInterface(
     fn=respond,
     additional_inputs=[
@@ -167,7 +146,7 @@ demo = gr.ChatInterface(
         top_p_slider,
         frequency_penalty_slider,
         seed_slider,
-        custom_model_box, # We pass it here to the ChatInterface function
+        custom_model_box, # Keep this reference here for the respond function
     ],
     fill_height=True,
     chatbot=chatbot,
@@ -175,67 +154,41 @@ demo = gr.ChatInterface(
 )
 print("ChatInterface object created.")
 
-
-# --------------------------
-# Additional Model Selection
-# --------------------------
-
-# This is the function that updates the Custom Model textbox whenever the user picks a model from the Radio
-def set_custom_model_from_radio(selected):
-    """
-    Triggered when the user picks a model from the 'Featured Models' radio.
-    We will update the Custom Model text box with that selection automatically.
-    """
-    print(f"Featured model selected: {selected}")
-    return selected
-
-# The set of models displayed in the radio
-models_list = [
-    "meta-llama/Llama-3.3-70B-Instruct",
-    "meta-llama/Llama-3.2-3B-Instruct",
-    "meta-llama/Llama-3.2-1B-Instruct",
-    "meta-llama/Llama-3.1-8B-Instruct",
-    "NousResearch/Hermes-3-Llama-3.1-8B",
-    "google/gemma-2-27b-it",
-    "google/gemma-2-9b-it",
-    "google/gemma-2-2b-it",
-    "mistralai/Mistral-Nemo-Instruct-2407",
-    "mistralai/Mixtral-8x7B-Instruct-v0.1",
-    "mistralai/Mistral-7B-Instruct-v0.3",
-    "Qwen/Qwen2.5-72B-Instruct",
-    "Qwen/QwQ-32B-Preview",
-    "PowerInfer/SmallThinker-3B-Preview",
-    "HuggingFaceTB/SmolLM2-1.7B-Instruct",
-    "TinyLlama/TinyLlama-1.1B-Chat-v1.0",
-    "microsoft/Phi-3.5-mini-instruct",
-]
-print("Models list initialized.")
-
-# This function handles searching for models by a user-provided filter
-def filter_models(search_term):
-    print(f"Filtering models with search term: {search_term}")
-    filtered = [m for m in models_list if search_term.lower() in m.lower()]
-    print(f"Filtered models: {filtered}")
-    return gr.update(choices=filtered)
-
-
-# --------------------------------
-# Advanced UI arrangement with demo
-# --------------------------------
 with demo:
-    # Create an Accordion for model selection
     with gr.Accordion("Model Selection", open=False):
-        # Place the Filter Models textbox and the Custom Model textbox side by side
+        # Create a row for the search and custom model inputs
        with gr.Row():
             model_search_box = gr.Textbox(
                 label="Filter Models",
                 placeholder="Search for a featured model...",
-                lines=1
+                lines=1,
+                scale=1 # Equal scaling with custom_model_box
             )
-            # Render the already-defined 'custom_model_box' so it appears in this row
-            custom_model_box.render()
+            # Place the custom model box here, alongside the search box
+            custom_model_box.render() # Render the previously defined textbox here
+            print("Model search box and custom model box created.")
+
+        models_list = [
+            "meta-llama/Llama-3.3-70B-Instruct",
+            "meta-llama/Llama-3.2-3B-Instruct",
+            "meta-llama/Llama-3.2-1B-Instruct",
+            "meta-llama/Llama-3.1-8B-Instruct",
+            "NousResearch/Hermes-3-Llama-3.1-8B",
+            "google/gemma-2-27b-it",
+            "google/gemma-2-9b-it",
+            "google/gemma-2-2b-it",
+            "mistralai/Mistral-Nemo-Instruct-2407",
+            "mistralai/Mixtral-8x7B-Instruct-v0.1",
+            "mistralai/Mistral-7B-Instruct-v0.3",
+            "Qwen/Qwen2.5-72B-Instruct",
+            "Qwen/QwQ-32B-Preview",
+            "PowerInfer/SmallThinker-3B-Preview",
+            "HuggingFaceTB/SmolLM2-1.7B-Instruct",
+            "TinyLlama/TinyLlama-1.1B-Chat-v1.0",
+            "microsoft/Phi-3.5-mini-instruct",
+        ]
+        print("Models list initialized.")
 
-        # Create the Radio for featured models
         featured_model_radio = gr.Radio(
             label="Select a model below",
             choices=models_list,
@@ -244,7 +197,12 @@ with demo:
         )
         print("Featured models radio button created.")
 
-        # Link the search box to the filtering function
+        def filter_models(search_term):
+            print(f"Filtering models with search term: {search_term}")
+            filtered = [m for m in models_list if search_term.lower() in m.lower()]
+            print(f"Filtered models: {filtered}")
+            return gr.update(choices=filtered)
+
         model_search_box.change(
             fn=filter_models,
             inputs=model_search_box,
@@ -252,7 +210,6 @@ with demo:
         )
         print("Model search box change event linked.")
 
-        # Link the radio to the function that sets the custom model textbox
        featured_model_radio.change(
             fn=set_custom_model_from_radio,
             inputs=featured_model_radio,
@@ -262,10 +219,6 @@ with demo:
 
 print("Gradio interface initialized.")
 
-
-# -----------------------
-# Launch the application
-# -----------------------
 if __name__ == "__main__":
     print("Launching the demo application.")
     demo.launch()
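
Note: the refactor relies on Gradio's pattern of building a component up front (so the ChatInterface can receive it through additional_inputs) and only placing it in the page later with .render() inside the Model Selection accordion. Below is a minimal, self-contained sketch of that pattern, not the app's actual code; it assumes a recent Gradio release, and the names model_box, pick_model, and filter_choices are illustrative, not taken from app.py.

import gradio as gr

# Stripped-down illustration of the render()/change() wiring used above.
# The textbox is created before any layout exists, so other code can hold
# a reference to it; it only appears on the page once .render() is called.
model_box = gr.Textbox(label="Custom Model")  # defined, but not yet rendered

models = ["meta-llama/Llama-3.3-70B-Instruct", "Qwen/Qwen2.5-72B-Instruct"]

def pick_model(choice):
    # Copy the radio selection into the custom-model textbox
    return choice

def filter_choices(term):
    # Narrow the radio choices to entries matching the search term
    return gr.update(choices=[m for m in models if term.lower() in m.lower()])

with gr.Blocks() as demo:
    with gr.Accordion("Model Selection", open=False):
        with gr.Row():
            search = gr.Textbox(label="Filter Models")
            model_box.render()  # place the pre-built textbox inside the row
        radio = gr.Radio(choices=models, label="Select a model")
    search.change(fn=filter_choices, inputs=search, outputs=radio)
    radio.change(fn=pick_model, inputs=radio, outputs=model_box)

if __name__ == "__main__":
    demo.launch()

Creating the textbox before the ChatInterface and rendering it inside the accordion is what lets a single component serve both as an input to respond() and as a visible control in the Model Selection panel.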