prithivMLmods committed
Commit d24e2bd · verified · 1 Parent(s): 5a2b76f

Update app.py

Files changed (1): app.py (+7 -10)
app.py CHANGED
@@ -17,7 +17,7 @@ def search(query):
     with requests.Session() as session:
         resp = session.get(
             url="https://www.google.com/search",
-            headers={"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/111.0"},
+            headers={"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36"},
             params={"q": term, "num": 3, "udm": 14},
             timeout=5,
             verify=None,
@@ -29,7 +29,7 @@ def search(query):
             link = result.find("a", href=True)
             link = link["href"]
             try:
-                webpage = session.get(link, headers={"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/111.0"}, timeout=5, verify=False)
+                webpage = session.get(link, headers={"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36"}, timeout=5, verify=False)
                 webpage.raise_for_status()
                 visible_text = extract_text_from_webpage(webpage.text)
                 if len(visible_text) > max_chars_per_page:
@@ -39,14 +39,11 @@ def search(query):
                 all_results.append({"link": link, "text": None})
     return all_results
 
-# Initialize inference clients for different models
 client_gemma = InferenceClient("mistralai/Mistral-7B-Instruct-v0.3")
-client_mixtral = InferenceClient("NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO")
 client_llama = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct")
 
 func_caller = []
 
-# Define the main chat function
 def respond(message, history):
     func_caller = []
 
@@ -81,7 +78,7 @@ def respond(message, history):
         web_results = search(query)
         gr.Info("Extracting relevant Info")
         web2 = ' '.join([f"Link: {res['link']}\nText: {res['text']}\n\n" for res in web_results if res['text']])
-        messages = f"system\nYou are OpenCHAT mini a helpful assistant made by KingNish. You are provided with WEB results from which you can find informations to answer users query in Structured and More better way. You do not say Unnecesarry things Only say thing which is important and relevant. You also Expert in every field and also learn and try to answer from contexts related to previous question. Try your best to give best response possible to user. You also try to show emotions using Emojis and reply like human, use short forms, friendly tone and emotions."
+        messages = f"system\nWeb Dac uses the user agents of Mozilla, AppleWebKit, and Safari browsers for chat responses and human context mimicking."
         for msg in history:
             messages += f"\nuser\n{str(msg[0])}"
             messages += f"\nassistant\n{str(msg[1])}"
@@ -93,7 +90,7 @@ def respond(message, history):
             output += response.token.text
             yield output
     else:
-        messages = f"system\nYou are OpenCHAT mini a helpful assistant made by KingNish. You answers users query like human friend. You are also Expert in every field and also learn and try to answer from contexts related to previous question. Try your best to give best response possible to user. You also try to show emotions using Emojis and reply like human, use short forms, friendly tone and emotions."
+        messages = f"system\nWeb Dac uses the user agents of Mozilla, AppleWebKit, and Safari browsers for chat responses and human context mimicking."
         for msg in history:
             messages += f"\nuser\n{str(msg[0])}"
             messages += f"\nassistant\n{str(msg[1])}"
@@ -105,7 +102,7 @@ def respond(message, history):
             output += response.token.text
             yield output
     except:
-        messages = f"system\nYou are OpenCHAT mini a helpful assistant made by KingNish. You answers users query like human friend. You are also Expert in every field and also learn and try to answer from contexts related to previous question. Try your best to give best response possible to user. You also try to show emotions using Emojis and reply like human, use short forms, friendly tone and emotions."
+        messages = f"system\nWeb Dac uses the user agents of Mozilla, AppleWebKit, and Safari browsers for chat responses and human context mimicking."
         for msg in history:
             messages += f"\nuser\n{str(msg[0])}"
             messages += f"\nassistant\n{str(msg[1])}"
@@ -121,8 +118,8 @@ demo = gr.ChatInterface(
     fn=respond,
     chatbot=gr.Chatbot(show_copy_button=True, likeable=True, layout="panel"),
     description=" ",
-    textbox=gr.Textbox(), # Changed to Textbox
-    multimodal=False, # Disabled multimodal
+    textbox=gr.Textbox(),
+    multimodal=False,
     concurrency_limit=200,
 )
 demo.launch(share=True)
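
For reference, a minimal standalone sketch of how the search() helper reads after this commit. Only the Chrome user agent and the lines shown in the hunks above come from app.py; the BeautifulSoup result selector, the max_chars_per_page default, and the extract_text_from_webpage stand-in are assumptions filled in so the sketch runs on its own.

# Sketch only; not a verbatim copy of app.py.
import requests
from bs4 import BeautifulSoup

CHROME_UA = (
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
    "(KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36"
)

def extract_text_from_webpage(html):
    # Stand-in for the helper defined earlier in app.py: keep only the visible text.
    return BeautifulSoup(html, "html.parser").get_text(separator=" ", strip=True)

def search(term, max_chars_per_page=8000):
    all_results = []
    with requests.Session() as session:
        resp = session.get(
            url="https://www.google.com/search",
            headers={"User-Agent": CHROME_UA},        # UA switched from Firefox to Chrome in this commit
            params={"q": term, "num": 3, "udm": 14},  # udm=14 requests the plain web-results view
            timeout=5,
        )
        soup = BeautifulSoup(resp.text, "html.parser")
        for result in soup.select("div.g"):           # assumed result-container selector
            link = result.find("a", href=True)
            if not link:
                continue
            link = link["href"]
            try:
                webpage = session.get(link, headers={"User-Agent": CHROME_UA}, timeout=5, verify=False)
                webpage.raise_for_status()
                visible_text = extract_text_from_webpage(webpage.text)
                if len(visible_text) > max_chars_per_page:
                    visible_text = visible_text[:max_chars_per_page]
                all_results.append({"link": link, "text": visible_text})
            except requests.exceptions.RequestException:
                all_results.append({"link": link, "text": None})
    return all_results

The same Chrome headers are sent both for the search request and for each per-result fetch, which is the substance of the first two hunks.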
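
The remaining hunks replace the OpenCHAT mini persona with the shorter Web Dac system prompt and drop the trailing comments on the ChatInterface arguments. Below is a minimal sketch of how that prompt string is combined with the chat history and streamed back, assuming the elided parts of respond() call InferenceClient.text_generation with stream=True and details=True (consistent with the response.token.text accesses in the context lines); the generation parameters, the final-turn framing, and the choice of client_llama are assumptions.

# Sketch only; generation parameters and prompt framing are assumed, not taken from app.py.
import gradio as gr
from huggingface_hub import InferenceClient

client_llama = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct")

SYSTEM_PROMPT = (
    "Web Dac uses the user agents of Mozilla, AppleWebKit, and Safari browsers "
    "for chat responses and human context mimicking."
)

def respond(message, history):
    # Flat prompt string built the same way as the diff's context lines.
    messages = f"system\n{SYSTEM_PROMPT}"
    for user_msg, assistant_msg in history:
        messages += f"\nuser\n{str(user_msg)}"
        messages += f"\nassistant\n{str(assistant_msg)}"
    messages += f"\nuser\n{str(message)}\nassistant\n"  # assumed final-turn framing

    output = ""
    stream = client_llama.text_generation(
        messages,
        max_new_tokens=512,   # assumed value
        stream=True,
        details=True,         # detail objects expose .token.text
        return_full_text=False,
    )
    for response in stream:
        output += response.token.text
        yield output

demo = gr.ChatInterface(
    fn=respond,
    chatbot=gr.Chatbot(show_copy_button=True, likeable=True, layout="panel"),
    description=" ",
    textbox=gr.Textbox(),
    multimodal=False,
    concurrency_limit=200,
)

demo.launch(share=True)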