nbaldwin committed
Commit 0b2483c · Parent: 1c54ca4

Clean Versions

Files changed (4)
  1. ChatHumanFlowModule.py +21 -9
  2. ChatHumanFlowModule.yaml +7 -1
  3. demo.yaml +10 -2
  4. run.py +10 -13
ChatHumanFlowModule.py CHANGED
@@ -47,7 +47,6 @@ class ChatHumanFlowModule(CompositeFlow):
     :type \**kwargs: Dict[str, Any]
     """
 
-
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
 
@@ -65,7 +64,7 @@ class ChatHumanFlowModule(CompositeFlow):
    def set_up_flow_state(self):
        """ This method sets up the flow state. It is called when the flow is executed."""
        super().set_up_flow_state()
-        self.flow_state["last_flow_called"] = None
+        self.flow_state["last_state"] = None
        self.flow_state["current_round"] = 0
        self.flow_state["user_inputs"] = []
        self.flow_state["assistant_outputs"] = []
@@ -78,10 +77,14 @@ class ChatHumanFlowModule(CompositeFlow):
        return "OpenAIChatHumanFlowModule"
 
    def max_rounds_reached(self):
+        """ This method checks whether the maximum number of rounds has been reached, returning True if so and False otherwise."""
        return self.flow_config["max_rounds"] is not None and self.flow_state["current_round"] >= self.flow_config["max_rounds"]
 
    def generate_reply(self):
-
+        """ This method generates the reply message. It is called when the interaction is finished.
+        :param input_message: The input message to the flow.
+        :type input_message: FlowMessage
+        """
        reply = self.package_output_message(
            input_message = self.flow_state["input_message"],
            response = {
@@ -98,7 +101,11 @@ class ChatHumanFlowModule(CompositeFlow):
 
 
    def call_to_user(self, input_message):
+        """ This method calls the User Flow (the human participant).
 
+        :param input_message: The input message to the flow.
+        :type input_message: FlowMessage
+        """
        self.flow_state["assistant_outputs"].append(input_message.data["api_output"])
 
        if self.max_rounds_reached():
@@ -108,16 +115,21 @@ class ChatHumanFlowModule(CompositeFlow):
                input_message,
                self.get_instance_id(),
            )
-        self.flow_state["last_flow_called"] = "User"
+        self.flow_state["last_state"] = "User"
 
        self.flow_state["current_round"] += 1
 
 
 
    def call_to_assistant(self, input_message):
+        """ This method calls the Assistant Flow.
+
+        :param input_message: The input message to the flow.
+        :type input_message: FlowMessage
+        """
        message = self.input_interface_assistant(input_message)
 
-        if self.flow_state["last_flow_called"] is None:
+        if self.flow_state["last_state"] is None:
            self.flow_state["input_message"] = input_message
 
        else:
@@ -132,7 +144,7 @@ class ChatHumanFlowModule(CompositeFlow):
            message,
            self.get_instance_id()
        )
-        self.flow_state["last_flow_called"] = "Assistant"
+        self.flow_state["last_state"] = "Assistant"
 
    def run(self, input_message: FlowMessage):
        """ This method runs the flow. It is the main method of the flow and it is called when the flow is executed.
@@ -140,13 +152,13 @@ class ChatHumanFlowModule(CompositeFlow):
        :param input_message: The input message to the flow.
        :type input_message: FlowMessage
        """
-        last_flow_called = self.flow_state["last_flow_called"]
+        last_state = self.flow_state["last_state"]
 
-        if last_flow_called is None or last_flow_called == "User":
+        if last_state is None or last_state == "User":
            self.call_to_assistant(input_message=input_message)
 
 
-        elif last_flow_called == "Assistant":
+        elif last_state == "Assistant":
            self.call_to_user(input_message=input_message)
 
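Net effect of the changes above: the turn-tracking key is renamed from last_flow_called to last_state, the methods gain docstrings, and run() keeps routing each incoming message to the Assistant or User subflow based on that key. A minimal standalone sketch of the alternation logic in plain Python (TurnTracker is a hypothetical stand-in, not an aiflows class):

# Sketch of the routing that run() implements via flow_state["last_state"].
class TurnTracker:
    def __init__(self, max_rounds=None):
        self.max_rounds = max_rounds      # None means run until early exit
        self.last_state = None            # None until the first message arrives
        self.current_round = 0

    def max_rounds_reached(self):
        return self.max_rounds is not None and self.current_round >= self.max_rounds

    def route(self):
        # The first message (last_state is None) and every user turn go to the
        # assistant; every assistant turn goes back to the user, unless the
        # round budget is spent, in which case the final reply is generated.
        if self.last_state is None or self.last_state == "User":
            self.last_state = "Assistant"
            return "Assistant"
        elif self.last_state == "Assistant":
            if self.max_rounds_reached():
                return "Done"             # corresponds to generate_reply()
            self.last_state = "User"
            self.current_round += 1
            return "User"

tracker = TurnTracker(max_rounds=2)
print([tracker.route() for _ in range(6)])
# ['Assistant', 'User', 'Assistant', 'User', 'Assistant', 'Done']

Note that call_to_assistant() also stashes the very first message as flow_state["input_message"], which is what generate_reply() answers when the interaction ends.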
ChatHumanFlowModule.yaml CHANGED
@@ -1,6 +1,6 @@
 name: "ChatInteractiveFlow"
 description: "Flow that enables chatting between a ChatAtomicFlow and a user providing the input."
-
+_target_: flow_modules.aiflows.ChatInteractiveFlowModule.ChatHumanFlowModule.instantiate_from_default_config
 max_rounds: null # Run until early exit is detected
 
 input_interface:
@@ -26,8 +26,14 @@ end_of_interaction:
 subflows_config:
   Assistant:
     _target_: flow_modules.aiflows.ChatFlowModule.ChatAtomicFlow.instantiate_from_default_config
+    flow_class_name: flow_modules.aiflows.ChatFlowModule.ChatAtomicFlow
+    flow_endpoint: Assistant
+    user_id: local
   User:
     _target_: flow_modules.aiflows.HumanStandardInputFlowModule.HumanStandardInputFlow.instantiate_from_default_config
+    flow_class_name: flow_modules.aiflows.HumanStandardInputFlowModule.HumanStandardInputFlow
+    flow_endpoint: User
+    user_id: local
 
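The default config now declares its own _target_ and gives each subflow an explicit flow_class_name, flow_endpoint, and user_id. Both _target_ and flow_class_name are dotted import paths; generically, such a path is resolved by importing the longest importable module prefix and then walking the remaining attributes. A sketch of that idea (a generic illustration, not the actual aiflows loader):

import importlib

def resolve_target(dotted_path: str):
    """Resolve 'pkg.module.Class.attr' to the object it names.

    Generic illustration of _target_ / flow_class_name resolution;
    the real aiflows loader may differ.
    """
    parts = dotted_path.split(".")
    for cut in range(len(parts), 0, -1):
        try:
            obj = importlib.import_module(".".join(parts[:cut]))
        except ModuleNotFoundError:
            continue
        for attr in parts[cut:]:
            obj = getattr(obj, attr)  # walk the Class.attr chain
        return obj
    raise ImportError(f"cannot resolve {dotted_path!r}")

print(resolve_target("collections.OrderedDict"))  # <class 'collections.OrderedDict'>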
demo.yaml CHANGED
@@ -3,7 +3,11 @@ max_rounds: 2
 _target_: flow_modules.aiflows.ChatInteractiveFlowModule.ChatHumanFlowModule.instantiate_from_default_config
 subflows_config:
   Assistant:
-    _target_: flow_modules.aiflows.ChatFlowModule.ChatAtomicFlow.instantiate_from_default_config
+    name: "Assistantflow"
+    description: "A flow that represents the assistant."
+    _target_: aiflows.base_flows.AtomicFlow.instantiate_from_default_config
+    user_id: local
+    flow_endpoint: Assistant
     backend:
       _target_: aiflows.backends.llm_lite.LiteLLMBackend
       api_infos: ???
@@ -13,7 +17,11 @@ subflows_config:
     input_interface_non_initialized: []
 
   User:
-    _target_: flow_modules.aiflows.HumanStandardInputFlowModule.HumanStandardInputFlow.instantiate_from_default_config
+    _target_: aiflows.base_flows.AtomicFlow.instantiate_from_default_config
+    user_id: local
+    flow_endpoint: User
+    name: "User"
+    description: "A flow that represents the user."
     request_multi_line_input_flag: False
     query_message_prompt_template:
       _target_: aiflows.prompt_template.JinjaPrompt
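Each subflow override now points its _target_ at the generic aiflows.base_flows.AtomicFlow and ties the block to a served flow via flow_endpoint and user_id. Since this file is passed as config_overrides in run.py below, the effective config is conceptually a recursive merge of the served defaults with these overrides. A minimal sketch of that merge over plain dicts (the real merge goes through the config machinery and may differ in detail):

def deep_merge(default: dict, override: dict) -> dict:
    """Recursively merge override into default, override winning on conflicts."""
    merged = dict(default)
    for key, value in override.items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = deep_merge(merged[key], value)
        else:
            merged[key] = value
    return merged

default = {"max_rounds": None,
           "subflows_config": {"Assistant": {"flow_endpoint": "Assistant"}}}
override = {"max_rounds": 2,
            "subflows_config": {"Assistant": {"user_id": "local"}}}
print(deep_merge(default, override))
# {'max_rounds': 2, 'subflows_config': {'Assistant': {'flow_endpoint': 'Assistant', 'user_id': 'local'}}}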
run.py CHANGED
@@ -57,25 +57,22 @@ if __name__ == "__main__":
 
    #3. ~~~~ Serve The Flow ~~~~
    serve_utils.recursive_serve_flow(
-        cl = cl,
-        flow_type="ChatInteractiveFlowModule",
-        default_config=cfg,
-        default_state=None,
-        default_dispatch_point="coflows_dispatch"
+        cl=cl,
+        flow_class_name="flow_modules.aiflows.ChatInteractiveFlowModule.ChatHumanFlowModule",
+        flow_endpoint="ChatHumanFlowModule",
    )
 
+
    #4. ~~~~~Start A Worker Thread~~~~~
-    run_dispatch_worker_thread(cl, dispatch_point="coflows_dispatch", flow_modules_base_path=FLOW_MODULES_PATH)
+    run_dispatch_worker_thread(cl)
 
    #5. ~~~~~Mount the flow and get its proxy~~~~~~
-    proxy_flow = serve_utils.recursive_mount(
+    proxy_flow = serve_utils.get_flow_instance(
        cl=cl,
-        client_id="local",
-        flow_type="ChatInteractiveFlowModule",
-        config_overrides=None,
-        initial_state=None,
-        dispatch_point_override=None,
-    )
+        flow_endpoint="ChatHumanFlowModule",
+        user_id="local",
+        config_overrides=cfg
+    )
 
    #6. ~~~ Get the data ~~~
    data = {"id": 0, "query": "I want to ask you a few questions"} # This can be a list of samples
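run.py moves from the old flow_type/dispatch-point API to an endpoint-based one: serve the class once under a flow_endpoint, start a worker with just the client handle, then request a per-user instance with get_flow_instance, passing demo.yaml as config_overrides. A toy in-memory version of that serve-once / instantiate-per-user split (a hypothetical sketch, not the colink-backed serve_utils implementation):

# Toy registry illustrating the new pattern; all names are stand-ins.
_registry = {}

def serve_flow(flow_class_name: str, flow_endpoint: str) -> None:
    """Register a flow class under an endpoint (one-time, like step 3)."""
    _registry[flow_endpoint] = {"class_name": flow_class_name, "instances": {}}

def get_flow_instance(flow_endpoint: str, user_id: str, config_overrides=None) -> dict:
    """Create a per-user instance of a served flow (like step 5)."""
    entry = _registry[flow_endpoint]
    instance = {
        "class_name": entry["class_name"],
        "user_id": user_id,
        "config": config_overrides or {},
    }
    entry["instances"][user_id] = instance
    return instance

serve_flow(
    flow_class_name="flow_modules.aiflows.ChatInteractiveFlowModule.ChatHumanFlowModule",
    flow_endpoint="ChatHumanFlowModule",
)
proxy = get_flow_instance("ChatHumanFlowModule", user_id="local",
                          config_overrides={"max_rounds": 2})
print(proxy["class_name"], proxy["user_id"])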