feat: Implement MCP Search Panel with Gradio

Files changed:
- app.py +63 -0
- pyproject.toml +96 -61
- requirements.txt +35 -1
- src/__init__.py +9 -0
app.py
ADDED
@@ -0,0 +1,63 @@
+import gradio as gr
+
+# Import your existing functions
+from src import search_on_web, search_custom_sites, search_custom_domain
+
+
+def run_search_on_web(query: str):
+    result = search_on_web(query)
+    return result.response_str, [item.model_dump() for item in result.citation]
+
+
+def run_search_custom_sites(query: str, sites: str):
+    site_list = [s.strip() for s in sites.split(",") if s.strip()]
+    result = search_custom_sites(query, site_list)
+    return result.response_str, [item.model_dump() for item in result.citation]
+
+
+def run_search_custom_domains(query: str, domains: str):
+    domain_list = [d.strip() for d in domains.split(",") if d.strip()]
+    result = search_custom_domain(query, domain_list)
+    return result.response_str, [item.model_dump() for item in result.citation]
+
+
+with gr.Blocks() as demo:
+    gr.Markdown("# 🔎 MCP Search Panel")
+
+    with gr.Tab("General Web Search"):
+        query_input_web = gr.Textbox(label="Query")
+        output_web_str = gr.Textbox(label="Answer")
+        output_web_cite = gr.JSON(label="Citations")
+        search_btn_web = gr.Button("Search")
+        search_btn_web.click(
+            fn=run_search_on_web,
+            inputs=query_input_web,
+            outputs=[output_web_str, output_web_cite],
+        )
+
+    with gr.Tab("Custom Sites Search"):
+        query_input_sites = gr.Textbox(label="Query")
+        sites_input = gr.Textbox(label="Sites (comma-separated)")
+        output_sites_str = gr.Textbox(label="Answer")
+        output_sites_cite = gr.JSON(label="Citations")
+        search_btn_sites = gr.Button("Search")
+        search_btn_sites.click(
+            fn=run_search_custom_sites,
+            inputs=[query_input_sites, sites_input],
+            outputs=[output_sites_str, output_sites_cite],
+        )
+
+    with gr.Tab("Custom Domains Search"):
+        query_input_domains = gr.Textbox(label="Query")
+        domains_input = gr.Textbox(label="Domains (comma-separated, e.g., edu, gov)")
+        output_domains_str = gr.Textbox(label="Answer")
+        output_domains_cite = gr.JSON(label="Citations")
+        search_btn_domains = gr.Button("Search")
+        search_btn_domains.click(
+            fn=run_search_custom_domains,
+            inputs=[query_input_domains, domains_input],
+            outputs=[output_domains_str, output_domains_cite],
+        )
+
+if __name__ == "__main__":
+    demo.launch(mcp_server=True)
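Note: app.py assumes that each imported search function returns an object exposing a response_str field and a citation list of Pydantic models (hence model_dump()). Those models live in src/tools and are not part of this commit; a minimal sketch of the interface the wrappers rely on, with hypothetical model names, might look like:

# Sketch only: the concrete models are defined in src/tools and may differ.
from pydantic import BaseModel


class Citation(BaseModel):        # hypothetical name
    title: str
    url: str


class SearchResult(BaseModel):    # hypothetical name
    response_str: str             # answer text shown in the "Answer" textbox
    citation: list[Citation]      # serialized via model_dump() for the JSON panel
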
pyproject.toml
CHANGED
@@ -5,65 +5,100 @@ description = "Add your description here"
 readme = "README.md"
 requires-python = ">=3.10"
 dependencies = [
-    … (previous 61 dependency entries, truncated in the diff view)
+    "aiofiles==24.1.0",
+    "annotated-types==0.7.0",
+    "anyio==4.9.0",
+    "babel==2.17.0",
+    "beautifulsoup4==4.13.4",
+    "cachetools==5.5.2",
+    "certifi==2025.4.26",
+    "charset-normalizer==3.4.2",
+    "click==8.2.1",
+    "courlan==1.3.2",
+    "dateparser==1.2.1",
+    "distro==1.9.0",
+    "exceptiongroup==1.3.0",
+    "fastapi==0.115.12",
+    "ffmpy==0.6.0",
+    "filelock==3.18.0",
+    "fsspec==2025.5.1",
+    "google-api-core==2.25.0",
+    "google-api-python-client==2.171.0",
+    "google-auth==2.40.3",
+    "google-auth-httplib2==0.2.0",
+    "googleapis-common-protos==1.70.0",
+    "googlesearch-python==1.3.0",
+    "gradio==5.33.1",
+    "gradio-client==1.10.3",
+    "groovy==0.1.2",
+    "h11==0.16.0",
+    "hf-xet==1.1.3",
+    "htmldate==1.9.3",
+    "httpcore==1.0.9",
+    "httplib2==0.22.0",
+    "httpx==0.28.1",
+    "httpx-sse==0.4.0",
+    "huggingface-hub==0.32.5",
+    "idna==3.10",
+    "jinja2==3.1.6",
+    "jiter==0.10.0",
+    "joblib==1.5.1",
+    "justext==3.0.2",
+    "lxml==5.4.0",
+    "lxml-html-clean==0.4.2",
+    "markdown-it-py==3.0.0",
+    "markupsafe==3.0.2",
+    "mcp==1.9.3",
+    "mdurl==0.1.2",
+    "nltk==3.9.1",
+    "numpy==2.2.6",
+    "openai==1.85.0",
+    "orjson==3.10.18",
+    "packaging==25.0",
+    "pandas==2.3.0",
+    "pillow==11.2.1",
+    "proto-plus==1.26.1",
+    "protobuf==6.31.1",
+    "pyasn1==0.6.1",
+    "pyasn1-modules==0.4.2",
+    "pydantic==2.11.5",
+    "pydantic-core==2.33.2",
+    "pydantic-settings==2.9.1",
+    "pydub==0.25.1",
+    "pygments==2.19.1",
+    "pyparsing==3.2.3",
+    "python-dateutil==2.9.0.post0",
+    "python-dotenv==1.1.0",
+    "python-multipart==0.0.20",
+    "pytz==2025.2",
+    "pyyaml==6.0.2",
+    "regex==2024.11.6",
+    "requests==2.32.4",
+    "rich==14.0.0",
+    "rsa==4.9.1",
+    "ruff==0.11.13",
+    "safehttpx==0.1.6",
+    "semantic-version==2.10.0",
+    "shellingham==1.5.4",
+    "six==1.17.0",
+    "sniffio==1.3.1",
+    "soupsieve==2.7",
+    "sse-starlette==2.3.6",
+    "starlette==0.46.2",
+    "textblob==0.19.0",
+    "tld==0.13.1",
+    "tomlkit==0.13.3",
+    "tqdm==4.67.1",
+    "trafilatura==2.0.0",
+    "typer==0.16.0",
+    "typing-extensions==4.14.0",
+    "typing-inspection==0.4.1",
+    "tzdata==2025.2",
+    "tzlocal==5.3.1",
+    "uritemplate==4.2.0",
+    "urllib3==2.4.0",
+    "uvicorn==0.34.3",
+    "validators==0.35.0",
+    "websockets==15.0.1",
+]
requirements.txt
CHANGED
@@ -1,3 +1,4 @@
+aiofiles==24.1.0
 annotated-types==0.7.0
 anyio==4.9.0
 babel==2.17.0
@@ -10,25 +11,45 @@ courlan==1.3.2
 dateparser==1.2.1
 distro==1.9.0
 exceptiongroup==1.3.0
+fastapi==0.115.12
+ffmpy==0.6.0
+filelock==3.18.0
+fsspec==2025.5.1
 google-api-core==2.25.0
 google-api-python-client==2.171.0
 google-auth==2.40.3
 google-auth-httplib2==0.2.0
 googleapis-common-protos==1.70.0
 googlesearch-python==1.3.0
+gradio==5.33.1
+gradio-client==1.10.3
+groovy==0.1.2
 h11==0.16.0
+hf-xet==1.1.3
 htmldate==1.9.3
 httpcore==1.0.9
 httplib2==0.22.0
 httpx==0.28.1
 httpx-sse==0.4.0
+huggingface-hub==0.32.5
 idna==3.10
+jinja2==3.1.6
 jiter==0.10.0
+joblib==1.5.1
 justext==3.0.2
 lxml==5.4.0
 lxml-html-clean==0.4.2
+markdown-it-py==3.0.0
+markupsafe==3.0.2
 mcp==1.9.3
+mdurl==0.1.2
+nltk==3.9.1
+numpy==2.2.6
 openai==1.85.0
+orjson==3.10.18
+packaging==25.0
+pandas==2.3.0
+pillow==11.2.1
 proto-plus==1.26.1
 protobuf==6.31.1
 pyasn1==0.6.1
@@ -36,26 +57,39 @@ pyasn1-modules==0.4.2
 pydantic==2.11.5
 pydantic-core==2.33.2
 pydantic-settings==2.9.1
+pydub==0.25.1
+pygments==2.19.1
 pyparsing==3.2.3
 python-dateutil==2.9.0.post0
 python-dotenv==1.1.0
 python-multipart==0.0.20
 pytz==2025.2
+pyyaml==6.0.2
 regex==2024.11.6
 requests==2.32.4
+rich==14.0.0
 rsa==4.9.1
+ruff==0.11.13
+safehttpx==0.1.6
+semantic-version==2.10.0
+shellingham==1.5.4
 six==1.17.0
 sniffio==1.3.1
 soupsieve==2.7
 sse-starlette==2.3.6
-starlette==0.
+starlette==0.46.2
+textblob==0.19.0
 tld==0.13.1
+tomlkit==0.13.3
 tqdm==4.67.1
 trafilatura==2.0.0
+typer==0.16.0
 typing-extensions==4.14.0
 typing-inspection==0.4.1
+tzdata==2025.2
 tzlocal==5.3.1
 uritemplate==4.2.0
 urllib3==2.4.0
 uvicorn==0.34.3
 validators==0.35.0
+websockets==15.0.1
src/__init__.py
CHANGED
@@ -0,0 +1,9 @@
+from .tools.custom_domains_search_tool import search_custom_domain
+from .tools.custom_sites_search_tool import search_custom_sites
+from .tools.search_on_web_tool import search_on_web
+
+__all__ = [
+    "search_custom_domain",
+    "search_custom_sites",
+    "search_on_web"
+]
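Once the app is running (python app.py, or the Space runtime), demo.launch(mcp_server=True) serves the three wrapper functions as MCP tools alongside the web UI. The HTTP endpoints can also be exercised from Python with gradio_client; the URL and api_name below are assumptions, with api_name following Gradio's default of naming endpoints after the wrapped function:

# Quick smoke test against a locally running panel (assumed URL and api_name).
from gradio_client import Client

client = Client("http://localhost:7860/")
answer, citations = client.predict(
    "What is the Model Context Protocol?",
    api_name="/run_search_on_web",
)
print(answer)
print(citations)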