ric9176 committed · Commit d1b9872 · 1 Parent(s): 5cb5f85

feat: add interface, refactor state nodes and app.py for UX

agent/utils/nodes.py CHANGED
@@ -175,8 +175,11 @@ async def write_memory(state: AgentState, config: RunnableConfig, store: BaseStore
     # Store the updated memory using async interface
     await store.aput(namespace, "user_memory", {"memory": new_memory.content})
 
-
-    return state
+    # Update the state with the new memory
+    return {
+        **state,
+        "user_memories": {"memory": new_memory.content}
+    }
 
 # Initialize tool node
 tool_node = ToolNode(tool_belt)
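
Note on the change above: in LangGraph, a node that returns a dict produces a partial state update, and the graph merges the returned keys into AgentState rather than requiring the whole state back. A minimal, self-contained sketch of that merge behaviour (the node body and the memory text here are illustrative assumptions, not this repo's code):

from typing import Annotated, Optional, TypedDict

from langgraph.graph import END, START, StateGraph
from langgraph.graph.message import add_messages

class AgentState(TypedDict):
    messages: Annotated[list, add_messages]
    context: list
    user_memories: Optional[dict]

def remember(state: AgentState):
    # Partial update: only "user_memories" is set; "messages" and "context"
    # are carried forward by the graph unchanged.
    return {"user_memories": {"memory": "Prefers live jazz over club nights."}}

builder = StateGraph(AgentState)
builder.add_node("remember", remember)
builder.add_edge(START, "remember")
builder.add_edge("remember", END)
graph = builder.compile()

print(graph.invoke({"messages": [], "context": []})["user_memories"])

The diff additionally spreads **state into the return value, which LangGraph merges the same way; the essential point is that the node now reports the new user_memories value instead of returning the state unchanged.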
agent/utils/state.py CHANGED
@@ -1,6 +1,7 @@
-from typing import Annotated, TypedDict
+from typing import Annotated, TypedDict, Optional
 from langgraph.graph.message import add_messages
 
 class AgentState(TypedDict):
     messages: Annotated[list, add_messages]
     context: list  # Store retrieved context
+    user_memories: Optional[dict]  # Store user memory information
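
With user_memories now part of AgentState, the value written by write_memory also appears on checkpointed state snapshots. A hedged read-side sketch, reusing the aget_state / .values access pattern already present in app.py (the helper name is an illustration, not repo code):

async def read_user_memory(graph, session_id: str) -> str:
    """Return the stored memory text for a thread, if any (illustrative helper)."""
    snapshot = await graph.aget_state(
        config={"configurable": {"thread_id": session_id}}
    )
    memories = snapshot.values.get("user_memories") or {}
    return memories.get("memory", "No memory stored yet.")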
interfaces/__init__.py ADDED
@@ -0,0 +1 @@
+"""Interfaces package for Chief Joy Officer."""
interfaces/chainlit/__init__.py ADDED
@@ -0,0 +1 @@
+"""Chainlit interface for Chief Joy Officer."""
interfaces/chainlit/app.py CHANGED
@@ -1,11 +1,19 @@
 import uuid
+import os
+import json
 from langchain_core.messages import HumanMessage, AIMessage, AIMessageChunk
 from langchain.schema.runnable.config import RunnableConfig
 import chainlit as cl
+import sys
+from pathlib import Path
+
+# Add the project root to the Python path
+project_root = str(Path(__file__).parent.parent.parent)
+if project_root not in sys.path:
+    sys.path.append(project_root)
+
 from agent import create_agent_graph, get_checkpointer
 from agent.utils.state import AgentState
-import os
-import json
 
 SHORT_TERM_MEMORY_DB_PATH = "data/short_term.db"
 
@@ -19,9 +27,6 @@ async def on_chat_start():
         session_id = str(uuid.uuid4())
         cl.user_session.set("session_id", session_id)
 
-        # Initialize empty message history
-        cl.user_session.set("message_history", [])
-
         welcome_message = cl.Message(
             content="Hello! I'm your chief joy officer, here to help you with finding fun things to do in London!",
             author="Assistant"
@@ -30,21 +35,14 @@ async def on_chat_start():
 
     except Exception as e:
         print(f"Error in chat initialization: {str(e)}")
-        error_message = cl.Message(
+        await cl.Message(
             content="I apologize, but I encountered an error during initialization. Please try refreshing the page.",
             author="System"
-        )
-        await error_message.send()
+        ).send()
 
 @cl.on_message
 async def on_message(message: cl.Message):
     """Handle incoming messages and stream responses"""
-    # Get or create session ID
-    session_id = cl.user_session.get("session_id")
-    if not session_id:
-        session_id = str(uuid.uuid4())
-        cl.user_session.set("session_id", session_id)
-
     # Initialize response message
     msg = cl.Message(content="")
 
@@ -54,39 +52,24 @@
         # Create graph with memory
         graph = await create_agent_graph(saver)
 
-        # Get message history and add current message
-        message_history = cl.user_session.get("message_history", [])
-        current_message = HumanMessage(content=message.content)
-        message_history.append(current_message)
+        # Get session ID
+        session_id = cl.user_session.get("session_id")
 
-        # Create current state
-        current_state = AgentState(
-            messages=message_history,
-            context=cl.user_session.get("last_context", [])
-        )
-
-        # Stream the response
+        # Process through graph with current message
        async for chunk in graph.astream(
-            current_state,
-            config={"configurable": {"thread_id": session_id}},
+            {"messages": [HumanMessage(content=message.content)]},
+            {"configurable": {"thread_id": session_id}},
             stream_mode="messages"
         ):
-            # Handle different node outputs
-            if isinstance(chunk[0], AIMessageChunk):
+            if chunk[1]["langgraph_node"] == "agent" and isinstance(
+                chunk[0], (AIMessageChunk, AIMessage)
+            ):
                 await msg.stream_token(chunk[0].content)
-            elif isinstance(chunk[0], AIMessage):
-                if chunk[0] not in message_history:
-                    message_history.append(chunk[0])
 
         # Get final state
         final_state = await graph.aget_state(
            config={"configurable": {"thread_id": session_id}}
         )
-
-        # Update session state
-        if final_state:
-            cl.user_session.set("message_history", message_history)
-            cl.user_session.set("last_context", final_state.values.get("context", []))
 
         # Send the final message
         await msg.send()
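
For reference, with stream_mode="messages" graph.astream yields (message_chunk, metadata) pairs, and metadata["langgraph_node"] names the node that produced each chunk; that is what the chunk[1]["langgraph_node"] == "agent" check above relies on, so only the agent node's tokens reach the chat window. A standalone sketch of the same filter outside Chainlit (the function name is an assumption; the "agent" node label comes from this diff):

from langchain_core.messages import AIMessage, AIMessageChunk, HumanMessage

async def stream_agent_tokens(graph, text: str, thread_id: str):
    """Yield only tokens emitted by the 'agent' node, skipping tool output."""
    async for chunk, metadata in graph.astream(
        {"messages": [HumanMessage(content=text)]},
        {"configurable": {"thread_id": thread_id}},
        stream_mode="messages",
    ):
        if metadata.get("langgraph_node") == "agent" and isinstance(
            chunk, (AIMessageChunk, AIMessage)
        ):
            yield chunk.content

Because the thread_id config now drives conversation history through the checkpointer, the message_history previously kept in cl.user_session is redundant, which appears to be why the diff removes it.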