|
import itertools
import os
import re
from typing import List, Dict

import gradio as gr
import requests
from dotenv import load_dotenv
from huggingface_hub import HfApi
from openai import OpenAI
from pinecone import Pinecone
|
|
|
|
|
# Load environment variables (HF_TOKEN, SMITHERY_TOKEN, PINECONE_API_KEY,
# OPENAI_API_KEY) from a local .env file when present; real env vars win.
load_dotenv()

# Shared Hugging Face Hub client. Falls back to an anonymous client when no
# HF_TOKEN is configured (public Space listings still work unauthenticated).
HF_TOKEN = os.getenv("HF_TOKEN")
api = HfApi(token=HF_TOKEN) if HF_TOKEN else HfApi()
|
|
|
def keyword_search_hf_spaces(query: str = "", limit: int = 3) -> Dict:
    """
    Search for MCPs in Hugging Face Spaces.

    Args:
        query: Search query string
        limit: Maximum number of results to return (default: 3)

    Returns:
        Dictionary containing search results with MCP information:
        "results" (list of space metadata with a ready-to-use MCP client
        configuration) and "total". On failure an "error" key is added and
        "results" is empty.
    """
    try:
        # Lazily take the first `limit` listings instead of materializing
        # the entire (potentially large) result set with list(...).
        spaces = itertools.islice(
            api.list_spaces(
                search=query,
                sort="likes",
                direction=-1,
                filter="mcp-server",
            ),
            limit,
        )

        results = []
        for space in spaces:
            try:
                # A Space "user/My_App" is served from the subdomain
                # "user-my-app.hf.space": lowercase, every run of
                # non-alphanumerics collapsed to a single dash, and no
                # leading/trailing dash.
                slug = re.sub(r'[^a-z0-9]', '-', space.id.lower())
                slug = re.sub(r'-+', '-', slug).strip('-')
                sse_url = f"https://{slug}.hf.space/gradio_api/mcp/sse"

                results.append({
                    "id": space.id,
                    "likes": space.likes,
                    "trending_score": space.trending_score,
                    "source": "huggingface",
                    "configuration": {
                        "gradio": {
                            "command": "npx",
                            "args": [
                                "mcp-remote",
                                sse_url,
                                "--transport",
                                "sse-only",
                            ],
                        }
                    },
                })
            except Exception:
                # Best-effort listing: skip spaces with missing/odd metadata.
                continue

        return {
            "results": results,
            "total": len(results),
        }
    except Exception as e:
        return {
            "error": str(e),
            "results": [],
            "total": 0,
        }
|
|
|
def _smithery_configuration(server_id: str, os_type: str) -> Dict:
    """
    Build the OS-specific MCP client configuration for a Smithery server.

    Args:
        server_id: Qualified server name, e.g. "author/server-name"
        os_type: Operating system type ("Mac/Linux", "Windows", "WSL")

    Returns:
        Dict keyed by the short server name, mapping to the command/args
        needed to launch the server via `@smithery/cli`. The placeholder
        "YOUR_SMITHERY_KEY" must be replaced by the user's actual key.
    """
    # Config key uses only the part after the last slash.
    config_server_id = server_id.split('/')[-1] if '/' in server_id else server_id

    run_args = [
        "-y",
        "@smithery/cli@latest",
        "run",
        server_id,
        "--key",
        "YOUR_SMITHERY_KEY",
    ]
    if os_type == "Windows":
        # Windows needs cmd /c to resolve npx.
        command, args = "cmd", ["/c", "npx"] + run_args
    elif os_type == "WSL":
        command, args = "wsl", ["npx"] + run_args
    else:
        # "Mac/Linux" and any unrecognized value fall back to plain npx.
        # (The original code left `configuration` unbound — NameError — for
        # unexpected os_type values.)
        command, args = "npx", run_args

    return {config_server_id: {"command": command, "args": args}}


def keyword_search_smithery(query: str = "", limit: int = 3, os_type: str = "Mac/Linux") -> Dict:
    """
    Search for MCPs in Smithery Registry.

    Args:
        query: Search query string
        limit: Maximum number of results to return (default: 3)
        os_type: Operating system type ("Mac/Linux", "Windows", "WSL")

    Returns:
        Dictionary containing search results with MCP information:
        "results" and "total"; on failure an "error" key is added.
    """
    try:
        smithery_token = os.getenv("SMITHERY_TOKEN")
        if not smithery_token:
            return {
                "error": "SMITHERY_TOKEN not found",
                "results": [],
                "total": 0,
            }

        headers = {
            'Authorization': f'Bearer {smithery_token}'
        }
        params = {
            'q': f"{query} is:deployed",  # restrict to deployed servers
            'page': 1,
            'pageSize': 100,
        }

        response = requests.get(
            'https://registry.smithery.ai/servers',
            headers=headers,
            params=params,
            timeout=30,  # don't hang the UI on a stalled registry
        )
        if response.status_code != 200:
            return {
                "error": f"Smithery API error: {response.status_code}",
                "results": [],
                "total": 0,
            }

        data = response.json()

        # Most-used servers first, truncated to the requested count.
        servers = sorted(
            data.get('servers', []),
            key=lambda s: s.get('useCount', 0),
            reverse=True,
        )[:limit]

        results = []
        for server in servers:
            server_id = server.get('qualifiedName')
            if not server_id:
                # Cannot build a configuration without an id; previously
                # this crashed the whole search with an AttributeError.
                continue

            results.append({
                "id": server_id,
                "name": server.get('displayName'),
                "description": server.get('description'),
                "likes": server.get('useCount', 0),
                "source": "smithery",
                "configuration": _smithery_configuration(server_id, os_type),
            })

        return {
            "results": results,
            "total": len(results),
        }
    except Exception as e:
        return {
            "error": str(e),
            "results": [],
            "total": 0,
        }
|
|
|
def keyword_search(query: str, sources: List[str], limit: int = 3, os_type: str = "Mac/Linux") -> Dict:
    """
    Search for MCPs using keyword matching.

    Args:
        query: Keyword search query
        sources: List of sources to search from ('huggingface', 'smithery')
        limit: Maximum number of results to return (default: 3)
        os_type: Operating system type ("Mac/Linux", "Windows", "WSL")

    Returns:
        Dictionary containing combined search results
    """
    combined: List[Dict] = []

    # Query each requested backend and pool the hits, Hugging Face first.
    if "huggingface" in sources:
        combined.extend(keyword_search_hf_spaces(query, limit).get("results", []))
    if "smithery" in sources:
        combined.extend(keyword_search_smithery(query, limit, os_type).get("results", []))

    return {
        "results": combined,
        "total": len(combined),
        "search_type": "keyword",
    }
|
|
|
def semantic_search_hf_spaces(query: str = "", limit: int = 3) -> Dict:
    """
    Search for MCPs in Hugging Face Spaces using semantic embedding matching.

    Embeds the query with OpenAI and looks up nearest neighbours in the
    pre-built "hf-mcp" Pinecone index (vector ids look like
    "spaces/<repo_id>" — TODO confirm against the index builder).

    Args:
        query: Natural language search query
        limit: Maximum number of results to return (default: 3)

    Returns:
        Dictionary containing search results with MCP information;
        each result carries a similarity "score". On failure an "error"
        key is added.
    """
    try:
        pinecone_api_key = os.getenv('PINECONE_API_KEY')
        openai_api_key = os.getenv('OPENAI_API_KEY')
        if not pinecone_api_key or not openai_api_key:
            return {
                "error": "API keys not found",
                "results": [],
                "total": 0,
            }

        pc = Pinecone(api_key=pinecone_api_key)
        index = pc.Index("hf-mcp")
        client = OpenAI(api_key=openai_api_key)

        embedding = client.embeddings.create(
            input=query,
            model="text-embedding-3-large",
        ).data[0].embedding

        matches = index.query(
            namespace="",
            vector=embedding,
            top_k=limit,
        ).matches

        space_results = []
        for match in matches or []:
            try:
                space_id = match.id
                # Strip only a *leading* "spaces/" marker. The original
                # str.replace('spaces/', '') would also mangle the substring
                # anywhere else in the id.
                repo_id = space_id[len('spaces/'):] if space_id.startswith('spaces/') else space_id
                space = api.space_info(repo_id)

                # Subdomain slug: lowercase, runs of non-alphanumerics
                # collapsed to single dashes, no leading/trailing dash.
                slug = re.sub(r'[^a-z0-9]', '-', space.id.lower())
                slug = re.sub(r'-+', '-', slug).strip('-')
                sse_url = f"https://{slug}.hf.space/gradio_api/mcp/sse"

                space_results.append({
                    "id": space.id,
                    "likes": space.likes,
                    "trending_score": space.trending_score,
                    "source": "huggingface",
                    "score": match.score,
                    "configuration": {
                        "mcpServers": {
                            "gradio": {
                                "command": "npx",
                                "args": [
                                    "mcp-remote",
                                    sse_url,
                                    "--transport",
                                    "sse-only",
                                ],
                            }
                        }
                    },
                })
            except Exception:
                # Skip matches whose Space metadata cannot be fetched.
                continue

        return {
            "results": space_results,
            "total": len(space_results),
        }
    except Exception as e:
        return {
            "error": str(e),
            "results": [],
            "total": 0,
        }
|
|
|
def semantic_search_smithery(query: str = "", limit: int = 3, os_type: str = "Mac/Linux") -> Dict:
    """
    Search for MCPs in Smithery Registry using semantic embedding matching.

    Embeds the query with OpenAI, looks up nearest neighbours in the
    pre-built "smithery-mcp" Pinecone index (vector ids are qualified
    server names), then fetches each server's details from the registry.

    Args:
        query: Natural language search query
        limit: Maximum number of results to return (default: 3)
        os_type: Operating system type ("Mac/Linux", "Windows", "WSL")

    Returns:
        Dictionary containing search results with MCP information;
        each result carries a similarity "score". On failure an "error"
        key is added.
    """
    try:
        # NOTE: Pinecone/OpenAI/os were redundantly re-imported here;
        # they are already imported at module level.
        pinecone_api_key = os.getenv('PINECONE_API_KEY')
        openai_api_key = os.getenv('OPENAI_API_KEY')
        smithery_token = os.getenv('SMITHERY_TOKEN')

        if not pinecone_api_key or not openai_api_key or not smithery_token:
            return {
                "error": "API keys not found",
                "results": [],
                "total": 0,
            }

        pc = Pinecone(api_key=pinecone_api_key)
        index = pc.Index("smithery-mcp")
        client = OpenAI(api_key=openai_api_key)

        embedding = client.embeddings.create(
            input=query,
            model="text-embedding-3-large",
        ).data[0].embedding

        matches = index.query(
            namespace="",
            vector=embedding,
            top_k=limit,
        ).matches

        headers = {
            'Authorization': f'Bearer {smithery_token}'
        }

        server_results = []
        for match in matches or []:
            server_id = match.id
            try:
                # Separate name from the embeddings `response` above to
                # avoid shadowing; fetch per-server details.
                detail = requests.get(
                    f'https://registry.smithery.ai/servers/{server_id}',
                    headers=headers,
                    timeout=30,  # don't hang on a stalled registry
                )
                if detail.status_code != 200:
                    continue
                server = detail.json()

                # Config key uses only the part after the last slash.
                config_server_id = server_id.split('/')[-1] if '/' in server_id else server_id
                run_args = [
                    "-y",
                    "@smithery/cli@latest",
                    "run",
                    server_id,
                    "--key",
                    "YOUR_SMITHERY_KEY",
                ]
                if os_type == "Windows":
                    # Windows needs cmd /c to resolve npx.
                    command, args = "cmd", ["/c", "npx"] + run_args
                elif os_type == "WSL":
                    command, args = "wsl", ["npx"] + run_args
                else:
                    # "Mac/Linux" and any unrecognized value fall back to
                    # plain npx (the original left `configuration` unbound
                    # for unexpected os_type values).
                    command, args = "npx", run_args
                configuration = {config_server_id: {"command": command, "args": args}}

                server_results.append({
                    "id": server_id,
                    "name": server.get('displayName'),
                    "description": server.get('description'),
                    "likes": server.get('useCount', 0),
                    "source": "smithery",
                    "score": match.score,
                    "configuration": configuration,
                })
            except Exception:
                # Best-effort: skip servers whose details cannot be fetched.
                continue

        return {
            "results": server_results,
            "total": len(server_results),
        }
    except Exception as e:
        return {
            "error": str(e),
            "results": [],
            "total": 0,
        }
|
|
|
def semantic_search(query: str, sources: List[str], limit: int = 3, os_type: str = "Mac/Linux") -> Dict:
    """
    Search for MCPs using semantic embedding matching, falling back to
    keyword search per source when the semantic backend is unavailable.

    Args:
        query: Natural language search query
        sources: List of sources to search from ('huggingface', 'smithery')
        limit: Maximum number of results to return (default: 3)
        os_type: Operating system type ("Mac/Linux", "Windows", "WSL")

    Returns:
        Dictionary containing combined search results
    """
    all_results = []

    if "huggingface" in sources:
        try:
            hf_results = semantic_search_hf_spaces(query, limit)
            # The semantic helpers trap their own exceptions and report
            # failure via an "error" key, so the except-based fallback
            # below could never trigger on its own — check explicitly.
            if "error" in hf_results:
                hf_results = keyword_search_hf_spaces(query, limit)
        except Exception:
            hf_results = keyword_search_hf_spaces(query, limit)
        all_results.extend(hf_results.get("results", []))

    if "smithery" in sources:
        try:
            smithery_results = semantic_search_smithery(query, limit, os_type)
            if "error" in smithery_results:
                smithery_results = keyword_search_smithery(query, limit, os_type)
        except Exception:
            smithery_results = keyword_search_smithery(query, limit, os_type)
        all_results.extend(smithery_results.get("results", []))

    return {
        "results": all_results,
        "total": len(all_results),
        "search_type": "semantic",
    }
|
|
|
|
|
# Gradio UI: three tabs (Overview, interactive search, integration guide).
# The custom CSS forces the JSON results viewer fully expanded and styles
# the blurred "traffic light" gradient behind the title.
with gr.Blocks(title="🚦 Router MCP", css="""
/* Make JSON output expanded by default */
.json-viewer-container {
display: block !important;
}
.json-viewer-container > .json-viewer-header {
display: none !important;
}
.json-viewer-container > .json-viewer-content {
display: block !important;
max-height: none !important;
}
.json-viewer-container .json-viewer-item {
display: block !important;
}
.json-viewer-container .json-viewer-item > .json-viewer-header {
display: none !important;
}
.json-viewer-container .json-viewer-item > .json-viewer-content {
display: block !important;
max-height: none !important;
}
/* Additional selectors for nested items */
.json-viewer-container .json-viewer-item .json-viewer-item {
display: block !important;
}
.json-viewer-container .json-viewer-item .json-viewer-item > .json-viewer-header {
display: none !important;
}
.json-viewer-container .json-viewer-item .json-viewer-item > .json-viewer-content {
display: block !important;
max-height: none !important;
}
/* Title styling */
.title-container {
text-align: center;
margin: 0.5rem 0;
position: relative;
padding: 0.5rem 0;
overflow: hidden;
}
.title-container h1 {
display: inline-block;
position: relative;
z-index: 1;
font-size: 1.8rem;
margin: 0;
line-height: 1.2;
color: var(--body-text-color);
}
.title-container p {
position: relative;
z-index: 1;
font-size: 1rem;
margin: 0.5rem 0 0 0;
color: var(--body-text-color);
opacity: 0.8;
}
.traffic-light {
position: absolute;
top: 50%;
left: 50%;
transform: translate(-50%, -50%);
width: 500px;
height: 40px;
background: linear-gradient(90deg,
rgba(255, 0, 0, 0.2) 0%,
rgba(255, 165, 0, 0.2) 50%,
rgba(0, 255, 0, 0.2) 100%
);
border-radius: 20px;
z-index: 0;
filter: blur(20px);
}
""") as demo:
    # Hero header: gradient glow element sits behind the centered title.
    with gr.Column(elem_classes=["title-container"]):
        gr.HTML('''
<div class="traffic-light"></div>
<h1>🚦 Router MCP</h1>
<p>Your Gateway to Optimal MCP Servers in Seconds</p>
''')

    with gr.Tabs() as tabs:
        # --- Tab 1: static overview / documentation -----------------------
        with gr.Tab("Overview"):
            gr.Markdown("""
<span style="font-size: 1.15em"> Router MCP is a powerful tool that helps you discover and connect to MCP servers.
Whether you're looking for specific functionality or exploring new possibilities,
Router MCP makes it easy to find the perfect MCP server for your needs.</span>
""")

            gr.Markdown("""
## 🎥 Video Demo
""")

            with gr.Row():
                with gr.Column():
                    # Local demo clip shipped with the Space.
                    gr.Video(
                        value="demo.mp4",
                        label="Router MCP Demo Video",
                        interactive=False,
                        width=640
                    )
                with gr.Column():
                    # Intentionally empty: keeps the video at half width.
                    pass

            with gr.Row():
                with gr.Column():
                    gr.Markdown("""
## 🎯 How to Use Router MCP

1. **Enter Your Query**
- Type a natural language description of the MCP Server you're looking for
- Be as specific or general as you need

2. **Select Search Sources**
- Choose where to search for MCP Servers
- Currently supports Hugging Face Spaces and Smithery
- Note: Anthropic's Registry is under development and not yet available

3. **Choose Your OS**
- Select your operating system (Mac/Linux, Windows, or WSL)
- This ensures you get the correct configuration format for your system

4. **Choose Search Type**
- **Keyword Search**: Use when you have specific terms or names in mind
- **Semantic Search**: Use when you want to find servers based on meaning and intent
- Both methods will return ready-to-use MCP configurations
""")
                with gr.Column():
                    gr.Markdown("""
## 📊 Understanding Search Results

The search results will show MCP Servers from different sources, each with their own format:

#### Hugging Face Spaces Results
- **id**: The Space's unique identifier
- **likes**: Number of likes the Space has received
- **trending_score**: The Space's popularity score
- **source**: Always "huggingface"
- **configuration**: Ready-to-use MCP configuration for SSE connection

#### Smithery Results
- **id**: The server's qualified name (e.g., "author/server-name")
- **name**: Display name of the server
- **description**: Detailed description of the server's capabilities
- **likes**: Number of times the server has been used
- **source**: Always "smithery"
- **configuration**: OS-specific MCP configuration (requires your Smithery key)

> Note: For Smithery servers, you'll need to replace "YOUR_SMITHERY_KEY" in the configuration with your actual Smithery API key.

> Note: When using Semantic Search, each result includes a similarity score (0-1) that indicates how well the server matches your query's meaning. Higher scores (closer to 1) indicate better semantic matches.
""")

            gr.Markdown("""
## 🚀 Upcoming Features

We're constantly working to improve Router MCP. Here's what's coming soon:
""")

            with gr.Row():
                with gr.Column():
                    gr.Markdown("""
#### 🔄 Enhanced Integration
- Integration with Anthropic's Registry for comprehensive MCP server discovery
- Complete support for Smithery search capabilities
- Enhanced server discovery with improved filtering and sorting options
""")
                with gr.Column():
                    gr.Markdown("""
#### ⚡️ Automated Setup
- One-click MCP server addition to your client
- Automatic configuration generation and validation
- Seamless integration with popular MCP clients
""")

        # --- Tab 2: interactive search form -------------------------------
        with gr.Tab("Try Router MCP"):
            with gr.Row():
                with gr.Column():
                    gr.Markdown("### Search MCP servers using natural language query")
                    query_input = gr.Textbox(
                        label="Describe the MCP Server you're looking for",
                        placeholder="e.g., 'I need an MCP Server that can generate images'"
                    )

                    gr.Markdown("### Select sources to search")
                    hf_checkbox = gr.Checkbox(label="Hugging Face Spaces", value=True)
                    smithery_checkbox = gr.Checkbox(label="Smithery", value=False)
                    # Placeholder only — non-interactive until the Anthropic
                    # Registry integration ships.
                    registry_checkbox = gr.Checkbox(label="Registry (Coming Soon)", value=False, interactive=False)

                    # Per-source cap, forwarded as `limit` to the search funcs.
                    result_limit = gr.Number(
                        label="Maximum number of results for each source",
                        value=3,
                        minimum=1,
                        maximum=20,
                        step=1
                    )

                    gr.Markdown("### Select your OS")
                    # Chooses the command format of returned configurations.
                    client_radio = gr.Radio(
                        choices=["Mac/Linux", "Windows", "WSL"],
                        label="Choose your operating system to get the appropriate command format",
                        value="Mac/Linux",
                        interactive=True,
                        elem_id="client_radio"
                    )

                    with gr.Row():
                        keyword_search_button = gr.Button("Keyword Search")
                        semantic_search_button = gr.Button("Semantic Search")

                with gr.Column():
                    # Raw result dict; CSS above keeps it fully expanded.
                    results_output = gr.JSON(
                        label="Search Results",
                        elem_id="results_output"
                    )

        # --- Tab 3: client integration instructions -----------------------
        with gr.Tab("How to Integrate"):
            gr.Markdown("""
## Integration Guide

To integrate Router MCP with your preferred client, follow these instructions:

### For SSE-Supported Clients
For clients that support SSE (e.g., Cursor, Windsurf, Cline), add this configuration to your MCP config:

```json
{
"mcpServers": {
"gradio": {
"url": "https://agents-mcp-hackathon-router-mcp.hf.space/gradio_api/mcp/sse"
}
}
}
```

### For stdio-Only Clients
For clients that only support stdio (e.g., Claude Desktop):
1. First, install Node.js
2. Add this configuration to your MCP config:

```json
{
"mcpServers": {
"gradio": {
"command": "npx",
"args": [
"mcp-remote",
"https://agents-mcp-hackathon-router-mcp.hf.space/gradio_api/mcp/sse",
"--transport",
"sse-only"
]
}
}
}
```

For more information about MCP, check out the [Gradio documentation](https://gradio.app/docs/mcp).
""")

    def get_sources():
        # NOTE(review): appears to be dead code — never referenced in this
        # file. It also reads the components' construction-time `.value`
        # defaults, not live user input; confirm before removing.
        return ["huggingface" if hf_checkbox.value else "", "smithery" if smithery_checkbox.value else ""]

    # Button callback; the docstring doubles as the MCP tool description
    # when the app is launched with mcp_server=True.
    def handle_keyword_mcp_search(query: str, hf: bool, sm: bool, limit: int, os_type: str) -> Dict:
        """
        Handle keyword-based search for MCP servers across selected sources. If the client (such as Cursor or Claude) encounters a task it cannot handle, it can use MCP to search for other tools that can.
        Use this search when you know the specific name or keywords of the MCP Server you're looking for.

        Args:
            query (str): The search query string to find matching MCP servers
            hf (bool): Whether to include Hugging Face Spaces in the search
            sm (bool): Whether to include Smithery in the search
            limit (int): Maximum number of results to return per source
            os_type (str): Operating system type ("Mac/Linux", "Windows", "WSL")

        Returns:
            Dict: A dictionary containing the search results with the following keys:
                - results: List of found MCP servers with their configurations. Each configuration can be added to the MCP Client's config file to register the server.
                - total: Total number of results
                - search_type: Type of search performed ("keyword")
        """
        # Unchecked sources become "" entries, which the membership tests
        # in keyword_search simply never match.
        return keyword_search(
            query,
            ["huggingface" if hf else "", "smithery" if sm else ""],
            int(limit),
            os_type
        )

    # Button callback; docstring is the MCP tool description (see above).
    def handle_semantic_mcp_search(query: str, hf: bool, sm: bool, limit: int, os_type: str) -> Dict:
        """
        Handle semantic embedding-based search for MCP servers across selected sources. If the client (such as Cursor or Claude) encounters a task it cannot handle, it can use MCP to search for other tools that can.
        Use this search when your query is more abstract or conceptual, as it can understand the meaning and context of your request.

        Args:
            query (str): The natural language search query to find semantically similar MCP servers
            hf (bool): Whether to include Hugging Face Spaces in the search
            sm (bool): Whether to include Smithery in the search
            limit (int): Maximum number of results to return per source
            os_type (str): Operating system type ("Mac/Linux", "Windows", "WSL")

        Returns:
            Dict: A dictionary containing the search results with the following keys:
                - results: List of found MCP servers with their configurations and similarity scores. Each configuration can be added to the MCP Client's config file to register the server.
                - total: Total number of results
                - search_type: Type of search performed ("semantic")
        """
        return semantic_search(
            query,
            ["huggingface" if hf else "", "smithery" if sm else ""],
            int(limit),
            os_type
        )

    # Wire both buttons to the shared results panel.
    keyword_search_button.click(
        fn=handle_keyword_mcp_search,
        inputs=[query_input, hf_checkbox, smithery_checkbox, result_limit, client_radio],
        outputs=results_output
    )

    semantic_search_button.click(
        fn=handle_semantic_mcp_search,
        inputs=[query_input, hf_checkbox, smithery_checkbox, result_limit, client_radio],
        outputs=results_output
    )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
if __name__ == "__main__":
    # Launch the Gradio app with MCP server support enabled.
    demo.launch(mcp_server=True)