atharva-nlp committed
Commit 1ae0aa6 · verified · 1 Parent(s): 70068ea

Update app.py

Files changed (1)
  1. app.py +8 -4
app.py CHANGED
@@ -14,7 +14,7 @@ from transformers.agents.search import DuckDuckGoSearchTool
 
 logging.basicConfig(level=logging.INFO)
 
-logging.info('App loading')
+logging.info('App loading\n')
 
 # Import tool from Hub
 image_generation_tool = load_tool("m-ric/text-to-image")
@@ -26,7 +26,7 @@ search_tool = DuckDuckGoSearchTool()
 # Initialize the agent with the image generation tool
 agent = ReactCodeAgent(tools=[image_generation_tool], llm_engine=llm_engine, additional_authorized_imports=['requests', 'bs4'] , add_base_tools=True)
 
-logging.info('App initialized')
+logging.info('App initialized \n')
 
 # Initializes your app with your bot token and socket mode handler
 app = App(token=os.environ.get("SLACK_BOT_TOKEN"))
@@ -66,6 +66,10 @@ def respond_in_assistant_thread(
     # Tell the human user the assistant bot acknowledges the request and is working on it
     set_status("Agent is hard at work...")
 
+    query = payload['blocks'][0]['elements'][0]['elements'][0]['text']
+
+    logging.info('Query ' + query + '\n')
+
     # Collect the conversation history with this user
     replies_in_thread = client.conversations_replies(
         channel=context.channel_id,
@@ -79,7 +83,7 @@ def respond_in_assistant_thread(
         messages_in_thread.append({"role": role, "content": message["text"]})
 
     # Pass the latest prompt and chat history to the LLM (call_llm is your own code)
-    returned_message = call_llm('What does SLACK ( software ) stands for ?')
+    returned_message = call_llm(query)
 
     # Post the result in the assistant thread
     say(text=returned_message)
@@ -95,5 +99,5 @@ app.use(assistant)
 
 # Start your app
 if __name__ == "__main__":
-    logging.info('Listening on socket mode')
+    logging.info('Listening on socket mode \n')
     SocketModeHandler(app, os.environ["SLACK_APP_TOKEN"]).start()
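
Note on the added query extraction: it reaches four levels into the Slack message payload, so a message without a rich-text block (for example, a file-only upload) would raise a KeyError or IndexError. Below is a minimal defensive sketch, assuming the standard Slack message event shape; the extract_query helper name is hypothetical and not part of this commit.

# Hypothetical helper, not part of this commit: tries the same nested
# rich_text path the handler uses, then falls back to the event's plain
# "text" field when that path is missing.
def extract_query(payload: dict) -> str:
    try:
        return payload['blocks'][0]['elements'][0]['elements'][0]['text']
    except (KeyError, IndexError, TypeError):
        return payload.get('text', '')

With a helper like this, the handler line would read query = extract_query(payload); whether falling back to the raw text field is acceptable depends on how the app should treat non-text messages.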