sudipta26889 committed
Commit 9ed206f · verified · Parent: 1a72432

Update app.py

Files changed (1):
  1. app.py +10 -19
app.py CHANGED
@@ -161,7 +161,6 @@ async def stream_answer(
             ctype = chunk.get("type")

             if ctype == "tool_log":
-                # Example: {"type": "tool_log", "tool": "...", "status": "started/finished"}
                 name = chunk.get("tool", "tool")
                 status = chunk.get("status", "")
                 _append_log(tool_log, f"- {name} **{status}**")
@@ -174,7 +173,6 @@ async def stream_answer(
                 yield {"delta": chunk.get("text", ""), "tool_log": _format_tool_log(tool_log), "citations": _format_citations(citations)}

             elif ctype == "tool_result":
-                # Try to capture a useful citation label and optional URL if present
                 tool_name = chunk.get("tool", "tool")
                 content = chunk.get("content")
                 url = None
@@ -205,7 +203,6 @@ async def stream_answer(
                 }

             else:
-                # Fallback if provider yields plain strings
                 yield {"delta": str(chunk), "tool_log": _format_tool_log(tool_log), "citations": _format_citations(citations)}

     except Exception as e:
@@ -233,7 +230,7 @@ with gr.Blocks(fill_height=True) as demo:
     chat = gr.Chatbot(
         label="Gradio Docs Assistant",
         height=520,
-        type="messages",  # expects: [{"role": "...", "content": "..."}]
+        type="messages",
     )
     with gr.Row():
         msg = gr.Textbox(
@@ -270,12 +267,10 @@ with gr.Blocks(fill_height=True) as demo:
         - tool activity
         - citations
         """
-        # Start a new assistant message for streaming
        history_msgs = (history_msgs or []) + [{"role": "user", "content": user_msg}]
         history_msgs.append({"role": "assistant", "content": ""})
         yield history_msgs, gr.update(value="_No tool activity yet._"), gr.update(value="_No citations captured yet._")

-        # Compose messages for LLM
         messages_for_llm = to_llm_messages(history_msgs[:-1], user_msg, style_choice)

         async for chunk in stream_answer(messages_for_llm, MODEL_ID, PROVIDER, HF_TOKEN):
@@ -284,22 +279,18 @@ with gr.Blocks(fill_height=True) as demo:
             history_msgs[-1]["content"] += delta
             yield history_msgs, gr.update(value=chunk.get("tool_log", "")), gr.update(value=chunk.get("citations", ""))

-    # Wire both Enter and button click; also pass "style"
     msg.submit(on_submit, inputs=[msg, chat, style], outputs=[chat, tool_log_md, citations_md], queue=True)
     send_btn.click(on_submit, inputs=[msg, chat, style], outputs=[chat, tool_log_md, citations_md], queue=True)

 # ----------------------------
 # Gradio runtime (queue + launch)
 # ----------------------------
-# Enable Gradio’s task queue (important for async streaming)
-demo.queue(max_size=32)
-
-# On Spaces, Gradio auto-serves; only call launch() for local runs.
-if __name__ == "__main__":
-    on_spaces = bool(os.environ.get("SPACE_ID"))
-    if not on_spaces:
-        demo.launch(
-            ssr_mode=False,  # disable SSR to prevent Node server start/stop churn
-            server_name="0.0.0.0",
-            server_port=7860,
-        )
+# IMPORTANT: assign the queued app back to 'demo' for older Gradio versions.
+demo = demo.queue(max_size=32)
+
+# Always launch; Spaces runs this script directly.
+demo.launch(
+    ssr_mode=False,  # disable SSR to avoid Node helper churn
+    server_name="0.0.0.0",
+    server_port=7860,
+)
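
For context, the last hunk boils down to two runtime changes: the queued app is assigned back to demo, and launch() is now called unconditionally instead of being guarded behind __main__ and a SPACE_ID check. Below is a minimal sketch of that pattern, assuming Gradio 5.x (where launch() accepts ssr_mode); the respond handler and the component names are placeholders for illustration and are not taken from app.py.

# Minimal sketch of the queue-then-launch pattern adopted by this commit.
# Assumes Gradio 5.x (ssr_mode is a launch() argument there); the handler
# and component names below are placeholders, not taken from app.py.
import gradio as gr

def respond(message, history):
    # Stand-in for the Space's real streaming on_submit handler.
    history = (history or []) + [
        {"role": "user", "content": message},
        {"role": "assistant", "content": f"You said: {message}"},
    ]
    return history, ""  # updated chat history, cleared textbox

with gr.Blocks() as demo:
    chat = gr.Chatbot(type="messages")  # history items are {"role": ..., "content": ...} dicts
    box = gr.Textbox(placeholder="Ask something")
    box.submit(respond, inputs=[box, chat], outputs=[chat, box])

# queue() returns the app, so reassigning keeps 'demo' pointing at the
# queued instance (harmless on current Gradio, needed on some older releases).
demo = demo.queue(max_size=32)

# Launch unconditionally, mirroring the commit: no __main__ or SPACE_ID guard.
demo.launch(server_name="0.0.0.0", server_port=7860, ssr_mode=False)

Run locally with python app.py to serve on port 7860. The removed code's own comment noted that Spaces auto-serves the module-level demo object, which is why the previous version guarded launch() behind __main__; this commit drops that guard in favor of always launching.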