from textblob import TextBlob
import gradio as gr
from smolagents import InferenceClientModel, CodeAgent
from smolagents.mcp_client import MCPClient
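

# Sentiment analysis helper built on TextBlob; returns polarity, subjectivity,
# and an overall assessment label.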
def sentiment_analysis(text: str) -> dict:
    """
    Analyze the sentiment of the given text.

    Args:
        text (str): The text to analyze

    Returns:
        dict: A dictionary containing polarity, subjectivity, and assessment
    """
    blob = TextBlob(text)
    sentiment = blob.sentiment
    return {
        "polarity": round(sentiment.polarity, 2),  # -1 (negative) to 1 (positive)
        "subjectivity": round(sentiment.subjectivity, 2),  # 0 (objective) to 1 (subjective)
        "assessment": "positive" if sentiment.polarity > 0 else "negative" if sentiment.polarity < 0 else "neutral",
    }
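

# Connect to a remote Gradio MCP server over SSE and load the tools it exposes.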
mcp_client = MCPClient(
    {"url": "https://slimanemakh-mcp-course.hf.space/gradio_api/mcp/sse"}
)
tools = mcp_client.get_tools()
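
# Wrap the MCP tools in a CodeAgent backed by a Hugging Face Inference model.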
model = InferenceClientModel()
agent = CodeAgent(tools=[*tools], model=model)
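
# Chat UI: each user message is passed to the agent and its answer is returned as text.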
demo = gr.ChatInterface(
    fn=lambda message, history: str(agent.run(message)),
    type="messages",
    examples=["Prime factorization of 68"],
    title="Agent with MCP Tools",
    description="This is a simple agent that uses MCP tools to answer questions.",
)

# Launch the interface and MCP server
if __name__ == "__main__":
    demo.launch(mcp_server=True)