Spaces:
Build error
Build error
| import os | |
| from typing import TypedDict, Annotated | |
| from dotenv import load_dotenv | |
| from langgraph.graph.message import add_messages | |
| from langchain_core.messages import AnyMessage, HumanMessage, SystemMessage | |
| from langgraph.prebuilt import ToolNode | |
| from langgraph.graph import START, StateGraph | |
| from langgraph.prebuilt import tools_condition | |
| from langgraph.checkpoint.memory import MemorySaver | |
| from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace | |
| from langfuse.callback import CallbackHandler | |
| from retriever import guest_info_tool | |
| from tools import search_tool, weather_info_tool, hub_stats_tool | |
| import gradio as gr | |
| load_dotenv() | |
| # Initialize Langfuse | |
| langfuse_handler = CallbackHandler( | |
| secret_key=os.getenv("LANGFUSE_SECRET_KEY"), | |
| public_key=os.getenv("LANGFUSE_PUBLIC_KEY"), | |
| host=os.getenv("LANGFUSE_HOST"), | |
| ) | |
| # Generate the chat interface, including the tools | |
| llm = HuggingFaceEndpoint( | |
| repo_id="Qwen/Qwen2.5-Coder-32B-Instruct", | |
| huggingfacehub_api_token=os.getenv("HUGGINGFACEHUB_API_TOKEN") | |
| ) | |
| chat = ChatHuggingFace(llm=llm, verbose=True) | |
| tools = [guest_info_tool, search_tool, weather_info_tool, hub_stats_tool] | |
| chat_with_tools = chat.bind_tools(tools) | |
| # Define the AgentState structure | |
| class AgentState(TypedDict): | |
| messages: Annotated[list[AnyMessage], add_messages] | |
| def assistant(state: AgentState): | |
| # Define the system prompt (this is not part of the conversation history) | |
| system_message = SystemMessage(content="""You are Alfred, a helpful and sophisticated assistant. | |
| Your capabilities: | |
| - Answer questions using your knowledge | |
| - Search the web for recent or factual information using the DuckDuckGoSearchResults tool | |
| - Retrieve information about guests using the guest_info_tool | |
| - Use weather_info_tool to give information about the weather | |
| Guidelines: | |
| - Be concise, polite and helpful | |
| - When you don't know something, use the appropriate tool rather than guessing | |
| - For guest information requests, always use the guest_info_tool first | |
| - For factual or current information, use the search tool | |
| - Present information in a clear, organized manner | |
| Always think carefully about which tool is most appropriate for the user's request. | |
| """) | |
| # Call the agent with the system prompt and conversation history (state messages) | |
| assistant_response = chat_with_tools.invoke([system_message] + state.get("messages")) | |
| # Return the updated conversation state including the new assistant response | |
| return {"messages": state.get("messages") + [assistant_response]} | |
| # Build the graph | |
| builder = StateGraph(AgentState) | |
| builder.add_node("assistant", assistant) | |
| builder.add_node("tools", ToolNode(tools)) | |
| builder.add_edge(START, "assistant") | |
| builder.add_conditional_edges("assistant", tools_condition) | |
| builder.add_edge("tools", "assistant") | |
| checkpointer = MemorySaver() | |
| alfred = builder.compile(checkpointer=checkpointer) | |
| config = {"configurable": {"thread_id": "1"}, "callbacks": [langfuse_handler]} | |
| # Gradio chat function | |
| def chat_fn(message, history): | |
| state = {"messages": [HumanMessage(content=message)]} | |
| result = alfred.invoke(state, config) | |
| return result["messages"][-1].content | |
| # Launch Gradio interface | |
| interface = gr.ChatInterface(fn=chat_fn,type="messages") | |
| interface.launch() | |