DrishtiSharma committed on
Commit
7ad4dda
·
verified ·
1 Parent(s): c3bf3f1

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +12 -3
app.py CHANGED
@@ -207,7 +207,6 @@ def run_agent(
207
 
208
  """
209
  Generate text based on user queries.
210
-
211
  Args:
212
  query: User's query
213
  model: LLM like "gpt-4o"
@@ -215,15 +214,18 @@ def run_agent(
215
  image_urls: List of URLs for images
216
  temperature: Value between 0 and 1. Defaults to 0.7
217
  agent_type: 'Tool Calling' or 'ReAct'
218
-
219
  Return:
220
  generated text
221
-
222
  The chat prompt and message history are stored in
223
  st.session_state variables.
224
  """
225
 
226
  try:
 
 
 
 
 
227
  llm = get_chat_model(model, temperature, [StreamHandler(st.empty())])
228
  if llm is None:
229
  st.error(f"Unsupported model: {model}", icon="🚨")
@@ -236,26 +238,32 @@ def run_agent(
236
 
237
  history_query = {"chat_history": chat_history, "input": query}
238
 
 
239
  message_with_no_image = st.session_state.chat_prompt.invoke(history_query)
240
  message_content = message_with_no_image.messages[0].content
241
 
242
  if image_urls:
 
243
  generated_text = process_with_images(llm, message_content, image_urls)
244
  human_message = HumanMessage(
245
  content=query, additional_kwargs={"image_urls": image_urls}
246
  )
247
  elif tools:
 
248
  generated_text = process_with_tools(
249
  llm, tools, agent_type, st.session_state.agent_prompt, history_query
250
  )
251
  human_message = HumanMessage(content=query)
252
  else:
 
253
  generated_text = llm.invoke(message_with_no_image).content
254
  human_message = HumanMessage(content=query)
255
 
 
256
  if isinstance(generated_text, list):
257
  generated_text = generated_text[0]["text"]
258
 
 
259
  st.session_state.history.append(human_message)
260
  st.session_state.history.append(AIMessage(content=generated_text))
261
 
@@ -266,6 +274,7 @@ def run_agent(
266
  return None
267
 
268
 
 
269
  def openai_create_image(
270
  description: str, model: str="dall-e-3", size: str="1024x1024"
271
  ) -> Optional[str]:
 
207
 
208
  """
209
  Generate text based on user queries.
 
210
  Args:
211
  query: User's query
212
  model: LLM like "gpt-4o"
 
214
  image_urls: List of URLs for images
215
  temperature: Value between 0 and 1. Defaults to 0.7
216
  agent_type: 'Tool Calling' or 'ReAct'
 
217
  Return:
218
  generated text
 
219
  The chat prompt and message history are stored in
220
  st.session_state variables.
221
  """
222
 
223
  try:
224
+ # Ensure retriever tool is included in tools
225
+ if "Retrieval" in st.session_state.tool_names[0] and st.session_state.retriever_tool:
226
+ if st.session_state.retriever_tool not in tools:
227
+ tools.append(st.session_state.retriever_tool)
228
+
229
  llm = get_chat_model(model, temperature, [StreamHandler(st.empty())])
230
  if llm is None:
231
  st.error(f"Unsupported model: {model}", icon="🚨")
 
238
 
239
  history_query = {"chat_history": chat_history, "input": query}
240
 
241
+ # Generate message content
242
  message_with_no_image = st.session_state.chat_prompt.invoke(history_query)
243
  message_content = message_with_no_image.messages[0].content
244
 
245
  if image_urls:
246
+ # Handle images if provided
247
  generated_text = process_with_images(llm, message_content, image_urls)
248
  human_message = HumanMessage(
249
  content=query, additional_kwargs={"image_urls": image_urls}
250
  )
251
  elif tools:
252
+ # Use tools for query execution
253
  generated_text = process_with_tools(
254
  llm, tools, agent_type, st.session_state.agent_prompt, history_query
255
  )
256
  human_message = HumanMessage(content=query)
257
  else:
258
+ # Fall back to basic query execution without tools
259
  generated_text = llm.invoke(message_with_no_image).content
260
  human_message = HumanMessage(content=query)
261
 
262
+ # Convert response into plain text
263
  if isinstance(generated_text, list):
264
  generated_text = generated_text[0]["text"]
265
 
266
+ # Update conversation history
267
  st.session_state.history.append(human_message)
268
  st.session_state.history.append(AIMessage(content=generated_text))
269
 
 
274
  return None
275
 
276
 
277
+
278
  def openai_create_image(
279
  description: str, model: str="dall-e-3", size: str="1024x1024"
280
  ) -> Optional[str]: