KingNish committed
Commit 8276bc3
Parent: ae0b491

Update app.py

Files changed (1):
  1. app.py +10 -19
app.py CHANGED
@@ -22,10 +22,9 @@ def extract_text_from_webpage(html_content):
 
 def search(query):
     term = query
-    print(f"Running web search for query: {term}")
     start = 0
     all_results = []
-    max_chars_per_page = 8000
+    max_chars_per_page = 8000
     with requests.Session() as session:
         resp = session.get(
             url="https://www.google.com/search",
@@ -47,7 +46,7 @@ def search(query):
                 if len(visible_text) > max_chars_per_page:
                     visible_text = visible_text[:max_chars_per_page]
                 all_results.append({"link": link, "text": visible_text})
-            except requests.exceptions.RequestException as e:
+            except requests.exceptions.RequestException:
                 all_results.append({"link": link, "text": None})
     return all_results
 
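For context, a minimal self-contained sketch of the fetch loop that this except clause sits in, so the effect of dropping the unused "as e" binding is easier to read. The helper name fetch_pages, the timeout value, the raise_for_status call, and the stubbed extract_text_from_webpage are assumptions for illustration, not taken from this commit:

import requests

def fetch_pages(links, max_chars_per_page=8000):
    # Hypothetical helper mirroring the loop in search(); app.py's real
    # extract_text_from_webpage strips HTML, here it is an identity stub.
    def extract_text_from_webpage(html_content):
        return html_content
    all_results = []
    with requests.Session() as session:
        for link in links:
            try:
                webpage = session.get(link, timeout=5)
                webpage.raise_for_status()
                visible_text = extract_text_from_webpage(webpage.text)
                if len(visible_text) > max_chars_per_page:
                    visible_text = visible_text[:max_chars_per_page]
                all_results.append({"link": link, "text": visible_text})
            except requests.exceptions.RequestException:
                # Failed fetches still yield an entry, with text set to None.
                all_results.append({"link": link, "text": None})
    return all_results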
@@ -55,23 +54,19 @@ def search(query):
 client_gemma = InferenceClient("google/gemma-1.1-7b-it")
 client_mixtral = InferenceClient("NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO")
 client_llama = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct")
-messages = []
+func_caller = []
 
 # Define the main chat function
 def respond(message, history):
-    global messages  # Make messages global for persistent storage
+    global func_caller
     vqa = ""
 
     # Handle image processing
     if message["files"]:
-        try:
-            for image in message["files"]:
-                vqa += "[CAPTION of IMAGE] "
-                gr.Info("Analyzing image")
-                vqa += generate_caption_instructblip(image, message["text"])
-                print(vqa)
-        except:
-            vqa = ""
+        for image in message["files"]:
+            vqa += "[CAPTION of IMAGE] "
+            gr.Info("Analyzing image")
+            vqa += generate_caption_instructblip(image, message["text"])
 
     # Define function metadata for user interface
     functions_metadata = [
@@ -82,11 +77,8 @@ def respond(message, history):
     ]
 
     message_text = message["text"]
+    func_caller.append({"role": "user", "content": f'[SYSTEM]You are a helpful assistant. You have access to the following functions: \n {str(functions_metadata)}\n\nTo use these functions respond with:\n<functioncall> {{ "name": "function_name", "arguments": {{ "arg_1": "value_1", "arg_1": "value_1", ... }} }} </functioncall> [USER] {message} {vqa}'})
 
-    # Append user messages and system instructions to the messages list
-    messages.append({"role": "user", "content": f'[SYSTEM]You are a helpful assistant. You have access to the following functions: \n {str(functions_metadata)}\n\nTo use these functions respond with:\n<functioncall> {{ "name": "function_name", "arguments": {{ "arg_1": "value_1", "arg_1": "value_1", ... }} }} </functioncall> [USER] {message} {vqa}'})
-
-    # Call the LLM for response generation
     response = client_gemma.chat_completion(messages, max_tokens=150)
     response = str(response)
     try:
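The hunk above swaps the old messages list for func_caller when building the function-calling prompt. A minimal, self-contained sketch of that pattern, assuming huggingface_hub's InferenceClient; the functions_metadata entry and user_query below are illustrative stand-ins for what app.py builds at runtime, and the way the reply is read out is an assumption about the client's return object:

from huggingface_hub import InferenceClient

client_gemma = InferenceClient("google/gemma-1.1-7b-it")
func_caller = []

# Illustrative tool schema; app.py defines its own functions_metadata list.
functions_metadata = [
    {"name": "web_search", "description": "Search the web for a query",
     "parameters": {"query": "string"}},
]

# One user message carries both the tool schema and the user turn,
# mirroring the func_caller.append(...) line added in this commit.
user_query = "What is the current weather in Paris?"  # illustrative input
func_caller.append({
    "role": "user",
    "content": f'[SYSTEM]You are a helpful assistant. You have access to the following functions: \n {str(functions_metadata)}\n\n'
               f'To use these functions respond with:\n<functioncall> {{ "name": "function_name", "arguments": {{ "arg_1": "value_1", ... }} }} </functioncall> '
               f'[USER] {user_query}'
})

response = client_gemma.chat_completion(func_caller, max_tokens=150)
# Reading the reply via choices[0].message.content assumes a recent
# huggingface_hub; app.py itself just str()-ifies the whole response object.
print(response.choices[0].message.content)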
@@ -98,9 +90,8 @@ def respond(message, history):
     response = response.replace('\\"', '"')
     print(f"\n{response}")
 
-    messages.append({"role": "assistant", "content": f"<functioncall>{response}</functioncall>"})
+    func_caller.append({"role": "assistant", "content": f"<functioncall>{response}</functioncall>"})
 
-    # Process and return the response based on the function call
     try:
         json_data = json.loads(str(response))
         if json_data["name"] == "web_search":
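After the assistant turn is appended, the <functioncall> payload is parsed back into JSON and routed to the matching tool. A minimal sketch of that dispatch step; dispatch_function_call and the web_search callable parameter are hypothetical names, and reading arguments["query"] is assumed from the prompt format rather than shown in this diff:

import json

def dispatch_function_call(response_text, web_search, fallback_query=""):
    # Hypothetical helper; app.py does this parsing inline inside respond().
    # response_text is expected to hold JSON such as
    # '{"name": "web_search", "arguments": {"query": "latest AI news"}}'.
    try:
        json_data = json.loads(response_text)
    except json.JSONDecodeError:
        return None  # the model did not emit a parseable function call
    if json_data.get("name") == "web_search":
        query = json_data.get("arguments", {}).get("query", fallback_query)
        return web_search(query)
    return None

# Usage with the search() helper patched in the earlier hunks:
# results = dispatch_function_call(response, search, fallback_query=message_text)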
 