KingNish committed on
Commit: c08168a
1 Parent(s): 6069913

Update app.py

Files changed (1)
  1. app.py +12 -6
app.py CHANGED
@@ -75,10 +75,13 @@ def search(query):
     return all_results
 
 # Initialize inference clients for different models
-client_gemma = InferenceClient("google/gemma-1.1-7b-it")
+client_gemma = InferenceClient("mistralai/Mistral-7B-Instruct-v0.3")
 client_mixtral = InferenceClient("NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO")
 client_llama = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct")
 
+
+func_caller = []
+
 # Define the main chat function
 def respond(message, history):
     func_caller = []
@@ -104,11 +107,15 @@ def respond(message, history):
         {"type": "function", "function": {"name": "image_generation", "description": "Generate image for user", "parameters": {"type": "object", "properties": {"query": {"type": "string", "description": "image generation prompt"}, "number_of_image": {"type": "integer", "description": "number of images to generate"}}, "required": ["query"]}}},
         {"type": "function", "function": {"name": "image_qna", "description": "Answer question asked by user related to image", "parameters": {"type": "object", "properties": {"query": {"type": "string", "description": "Question by user"}}, "required": ["query"]}}},
     ]
-
+
+    for msg in history:
+        func_caller.append({"role": "user", "content": f"{str(msg[0])}"})
+        func_caller.append({"role": "assistant", "content": f"{str(msg[1])}"})
+
     message_text = message["text"]
     func_caller.append({"role": "user", "content": f'[SYSTEM]You are a helpful assistant. You have access to the following functions: \n {str(functions_metadata)}\n\nTo use these functions respond with:\n<functioncall> {{ "name": "function_name", "arguments": {{ "arg_1": "value_1", "arg_1": "value_1", ... }} }} </functioncall> [USER] {message_text}'})
 
-    response = client_gemma.chat_completion(func_caller, max_tokens=150)
+    response = client_gemma.chat_completion(func_caller, max_tokens=200)
     response = str(response)
     try:
         response = response[int(response.find("{")):int(response.index("</"))]
@@ -117,10 +124,9 @@ def respond(message, history):
     response = response.replace("\\n", "")
     response = response.replace("\\'", "'")
     response = response.replace('\\"', '"')
+    response = response.replace('\\', '')
     print(f"\n{response}")
 
-    func_caller.append({"role": "assistant", "content": f"<functioncall>{response}</functioncall>"})
-
    try:
        json_data = json.loads(str(response))
        if json_data["name"] == "web_search":
@@ -145,7 +151,7 @@ def respond(message, history):
             gr.Info("Generating Image, Please wait 10 sec...")
             client = InferenceClient("stabilityai/stable-diffusion-3-medium-diffusers")
             seed = random.randint(0,999999)
-            image = client.text_to_image(message_text, negative_prompt=f"{seed}")
+            image = client.text_to_image(message_text, negative_prompt=f"{seed}", num_inference_steps=20)
             yield gr.Image(image)
             gr.Info("We are going to mor upgrade image generator in next update")
         elif json_data["name"] == "image_qna":
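
For context, the changed respond() path now replays the prior chat history into func_caller, asks the Mistral-backed client_gemma for a <functioncall> block (max_tokens raised to 200), and strips escape characters, including the new bare-backslash pass, before json.loads. Below is a minimal standalone sketch of that flow, not the full app: it assumes huggingface_hub is installed with a valid HF token in the environment, trims functions_metadata to the image_generation entry quoted verbatim in the diff, and the helper names build_messages and extract_functioncall are illustrative, not taken from app.py.

import json
from huggingface_hub import InferenceClient

# Model ID taken from the commit above.
client_gemma = InferenceClient("mistralai/Mistral-7B-Instruct-v0.3")

# Trimmed to the single entry shown verbatim in the diff.
functions_metadata = [
    {"type": "function", "function": {"name": "image_generation", "description": "Generate image for user", "parameters": {"type": "object", "properties": {"query": {"type": "string", "description": "image generation prompt"}, "number_of_image": {"type": "integer", "description": "number of images to generate"}}, "required": ["query"]}}},
]

def build_messages(message_text, history):
    # Replay prior turns the way the updated respond() does: history is a list
    # of (user, assistant) pairs, each appended as its own message.
    func_caller = []
    for user_turn, assistant_turn in history:
        func_caller.append({"role": "user", "content": f"{str(user_turn)}"})
        func_caller.append({"role": "assistant", "content": f"{str(assistant_turn)}"})
    func_caller.append({"role": "user", "content": f'[SYSTEM]You are a helpful assistant. You have access to the following functions: \n {str(functions_metadata)}\n\nTo use these functions respond with:\n<functioncall> {{ "name": "function_name", "arguments": {{ "arg_1": "value_1", ... }} }} </functioncall> [USER] {message_text}'})
    return func_caller

def extract_functioncall(raw):
    # Mirror the cleanup in the commit: slice between the first "{" and the
    # closing "</", drop escape sequences (including the bare backslash pass
    # added in this change), then parse as JSON.
    raw = str(raw)
    raw = raw[raw.find("{"):raw.index("</")]
    for old, new in (("\\n", ""), ("\\'", "'"), ('\\"', '"'), ("\\", "")):
        raw = raw.replace(old, new)
    return json.loads(raw)

if __name__ == "__main__":
    messages = build_messages("Draw a watercolor cat", history=[])
    response = client_gemma.chat_completion(messages, max_tokens=200)
    try:
        call = extract_functioncall(response)
        print(call["name"], call.get("arguments"))
    except (ValueError, json.JSONDecodeError):
        # The model may answer in plain text instead of emitting a function call.
        print(str(response))

The image branch follows the same InferenceClient pattern; the only change in this commit is passing num_inference_steps=20 to text_to_image alongside the existing prompt and negative_prompt arguments.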