nbaldwin committed
Commit 6f93da3 · 1 parent: b6b3916

Getting ready for merge

Files changed (2):
  1. ChatHumanFlowModule.py +5 -2
  2. run.py +1 -8
ChatHumanFlowModule.py CHANGED
@@ -61,6 +61,8 @@ class ChatHumanFlowModule(CompositeFlow):
             additional_transformations = [self.regex_extractor, self.end_of_interaction]
         )
 
+        self.input_interface_user = KeyInterface()
+
     def set_up_flow_state(self):
         """ This method sets up the flow state. It is called when the flow is executed."""
         super().set_up_flow_state()
@@ -106,8 +108,9 @@ class ChatHumanFlowModule(CompositeFlow):
         :param input_message: The input message to the flow.
        :type input_message: FlowMessage
         """
-        self.flow_state["assistant_outputs"].append(input_message.data["api_output"])
-        message = self.package_input_message(data=input_message.data)
+        msg = self.input_interface_user(input_message)###I ADDED THIS
+        self.flow_state["assistant_outputs"].append(msg.data["api_output"])###I
+        message = self.package_input_message(data=msg.data)
 
         if self.max_rounds_reached():
             self.generate_reply()
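Net effect of this change: the incoming FlowMessage is now routed through self.input_interface_user (a KeyInterface constructed with no arguments, so presumably a pass-through for now) before its api_output is appended to flow_state["assistant_outputs"], giving the module a single hook for remapping user-facing keys later. Below is a minimal, self-contained sketch of that pattern; Msg and MiniKeyInterface are hypothetical stand-ins, not the aiflows classes, and only mirror the behaviour the diff relies on (callable on a message, returning a message with a .data dict).

from dataclasses import dataclass, field
from typing import Optional


@dataclass
class Msg:
    data: dict = field(default_factory=dict)


class MiniKeyInterface:
    """With no arguments this is a pass-through; keys_to_rename remaps data keys."""

    def __init__(self, keys_to_rename: Optional[dict] = None):
        self.keys_to_rename = keys_to_rename or {}

    def __call__(self, message: Msg) -> Msg:
        # Apply the (possibly empty) key remapping and hand back a new message.
        new_data = {self.keys_to_rename.get(k, k): v for k, v in message.data.items()}
        return Msg(data=new_data)


# Usage mirroring the patched method: transform the incoming message first,
# then store its "api_output" in the flow state.
input_interface_user = MiniKeyInterface()      # no-op today, a hook for later remapping
flow_state = {"assistant_outputs": []}

input_message = Msg(data={"api_output": "Hello! How can I help?"})
msg = input_interface_user(input_message)
flow_state["assistant_outputs"].append(msg.data["api_output"])
print(flow_state["assistant_outputs"])         # ['Hello! How can I help?']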
run.py CHANGED
@@ -31,7 +31,6 @@ flow_verse.sync_dependencies(dependencies)
 if __name__ == "__main__":
 
     #1. ~~~~~ Set up a colink server ~~~~
-    FLOW_MODULES_PATH = "./"
 
     cl = start_colink_server()
 
@@ -78,13 +77,7 @@ if __name__ == "__main__":
     data = {"id": 0, "query": "I want to ask you a few questions"} # This can be a list of samples
     # data = {"id": 0, "question": "Who was the NBA champion in 2023?"} # This can be a list of samples
 
-    #option1: use the FlowMessage class
-    input_message = FlowMessage(
-        data=data,
-    )
-
-    #option2: use the proxy_flow
-    #input_message = proxy_flow.package_input_message(data = data)
+    input_message = proxy_flow.package_input_message(data = data)
 
     #7. ~~~ Run inference ~~~
     future = proxy_flow.get_reply_future(input_message)
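On the run.py side, the hand-built FlowMessage (option 1) is dropped: the input is now packaged by the proxy flow itself via package_input_message, presumably so the message carries whatever metadata the flow expects, and the reply is read from the future returned by get_reply_future. A self-contained sketch of that calling pattern follows; MockProxyFlow and Msg are hypothetical stand-ins rather than the aiflows API, and the mock future resolves immediately where a real flow would answer asynchronously.

from concurrent.futures import Future
from dataclasses import dataclass, field


@dataclass
class Msg:
    data: dict = field(default_factory=dict)
    meta: dict = field(default_factory=dict)


class MockProxyFlow:
    def package_input_message(self, data: dict) -> Msg:
        # The flow attaches whatever metadata it needs; the caller only supplies data.
        return Msg(data=data, meta={"src": "proxy"})

    def get_reply_future(self, message: Msg) -> Future:
        fut: Future = Future()
        # A real flow answers asynchronously; this mock resolves on the spot.
        fut.set_result({"api_output": "echo: " + str(message.data.get("query", ""))})
        return fut


proxy_flow = MockProxyFlow()
data = {"id": 0, "query": "I want to ask you a few questions"}

input_message = proxy_flow.package_input_message(data=data)   # let the flow build the message
future = proxy_flow.get_reply_future(input_message)
print(future.result()["api_output"])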