sudipta26889 committed on
Commit
0102b23
·
verified ·
1 Parent(s): d722014

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +119 -0
app.py ADDED
@@ -0,0 +1,119 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import asyncio
import os
from typing import Any, AsyncIterator, Dict, Iterable, List

import gradio as gr
from huggingface_hub import MCPClient
7
+
8
+ # ---- CONFIG ----
9
+ # The official Gradio Docs MCP Server (SSE) endpoint
10
+ GRADIO_DOCS_MCP_SSE = "https://gradio-docs-mcp.hf.space/gradio_api/mcp/sse"
11
+
12
+ # Choose an LLM that supports tool-calling via HF Inference
13
+ # You can switch to any provider/model you have access to.
14
+ MODEL_ID = os.environ.get("CHAT_MODEL", "meta-llama/Meta-Llama-3.1-8B-Instruct")
15
+ PROVIDER = os.environ.get("CHAT_PROVIDER", "hf-inference") # or "auto"
16
+
17
+
18
+ def make_mcp_client() -> MCPClient:
19
+ """
20
+ Create an MCPClient that:
21
+ 1) connects to the Gradio Docs MCP (SSE)
22
+ 2) talks to a tool-capable model via Hugging Face Inference
23
+ """
24
+ client = MCPClient(model=MODEL_ID, provider=PROVIDER)
25
+ client.add_mcp_server(
26
+ type="sse",
27
+ url=GRADIO_DOCS_MCP_SSE,
28
+ # Optionally pass headers=..., timeout=..., sse_read_timeout=...
29
+ )
30
+ return client
31
+
32
+
33
+ # Single global client for the Space runtime
34
+ mcp_client = make_mcp_client()
35
+
36
+ SYSTEM_PROMPT = (
37
+ "You are a helpful assistant that answers questions strictly using the "
38
+ "Gradio documentation via the MCP tools provided. Prefer the latest docs. "
39
+ "Cite function/class names from the docs and include short code examples when relevant."
40
+ )
41
+
42
+ def to_messages(history: List[Dict[str, Any]], user_msg: str) -> List[Dict[str, Any]]:
43
+ messages: List[Dict[str, Any]] = []
44
+ # Add a system message up front
45
+ messages.append({"role": "system", "content": SYSTEM_PROMPT})
46
+ for turn in history:
47
+ # gr.Chatbot returns [("user", "assistant"), ...] pairs as list of lists
48
+ user, bot = turn
49
+ if user:
50
+ messages.append({"role": "user", "content": str(user)})
51
+ if bot:
52
+ messages.append({"role": "assistant", "content": str(bot)})
53
+ messages.append({"role": "user", "content": user_msg})
54
+ return messages
55
+
56
+
57
+ async def stream_answer(messages: List[Dict[str, Any]]) -> Iterable[str]:
58
+ """
59
+ Stream chunks from the MCPClient while it injects and executes
60
+ the Gradio Docs MCP tools under the hood.
61
+ """
62
+ # The MCPClient will:
63
+ # - list tools from the server
64
+ # - give them to the model
65
+ # - execute tools if the model chooses to call them
66
+ # - stream back model text and tool results
67
+ async for chunk in mcp_client.process_single_turn_with_tools(messages):
68
+ # chunk can be dicts representing text deltas and/or tool results
69
+ if isinstance(chunk, dict):
70
+ if chunk.get("type") == "tool_log":
71
+ # Show tool activity as small, inline updates
72
+ name = chunk.get("tool", "tool")
73
+ status = chunk.get("status", "")
74
+ yield f"\n\n_(using **{name}** {status})_"
75
+ elif chunk.get("type") == "text_delta":
76
+ yield chunk.get("delta", "")
77
+ elif chunk.get("type") == "text":
78
+ yield chunk.get("text", "")
79
+ elif chunk.get("type") == "tool_result":
80
+ # Nicely format tool results if text content returned
81
+ content = chunk.get("content")
82
+ if isinstance(content, str) and content.strip():
83
+ yield f"\n\n**Result:**\n{content}"
84
+ else:
85
+ # Fallback if a provider returns plain text
86
+ yield str(chunk)
87
+
88
+
89
+ async def respond(user_msg: str, history: List[List[str]]):
90
+ messages = to_messages(history, user_msg)
91
+ partial = ""
92
+ async for piece in stream_answer(messages):
93
+ partial += piece
94
+ yield partial
95
+
96
+
97
+ with gr.Blocks(fill_height=True) as demo:
98
+ gr.Markdown("# 🤖 Gradio Docs Chat (MCP Client)\nAsk anything about Gradio—answers are grounded in the official docs via MCP.")
99
+ chat = gr.Chatbot(height=520, type="messages")
100
+ msg = gr.Textbox(placeholder="e.g., How do I use gr.Interface with multiple inputs?", scale=1)
101
+ with gr.Row():
102
+ clear = gr.ClearButton(components=[chat], value="Clear")
103
+
104
+ async def on_submit(user_msg, history):
105
+ history = history + [[user_msg, ""]]
106
+ stream = respond(user_msg, history[:-1])
107
+ async for chunk in stream:
108
+ history[-1][1] = chunk
109
+ yield history
110
+
111
+ msg.submit(
112
+ fn=on_submit,
113
+ inputs=[msg, chat],
114
+ outputs=chat,
115
+ )
116
+
117
+ if __name__ == "__main__":
118
+ # Spaces will call demo.launch() automatically, but keep for local dev
119
+ demo.launch()