Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -6,13 +6,11 @@ from pypdf import PdfReader
|
|
| 6 |
import gradio as gr
|
| 7 |
import csv
|
| 8 |
|
| 9 |
-
print("hi")
|
| 10 |
-
|
| 11 |
# Load environment variables
|
| 12 |
load_dotenv(override=True)
|
| 13 |
|
| 14 |
GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai/"
|
| 15 |
-
google_api_key = os.getenv("GOOGLE_API_KEY")
|
| 16 |
gemini = OpenAI(base_url=GEMINI_BASE_URL, api_key=google_api_key)
|
| 17 |
|
| 18 |
# CSV files for logging
|
|
@@ -150,40 +148,36 @@ class Me:
|
|
| 150 |
|
| 151 |
# Main chat function
|
| 152 |
def chat(self, message, history):
|
| 153 |
-
|
| 154 |
-
|
| 155 |
-
|
| 156 |
-
|
| 157 |
-
messages.append({"role": "assistant", "content": assistant_msg})
|
| 158 |
-
messages.append({"role": "user", "content": message})
|
| 159 |
-
|
| 160 |
-
# 2. First API call: The model decides if it needs a tool.
|
| 161 |
response = self.openai.chat.completions.create(
|
| 162 |
model="gemini-2.5-flash-preview-05-20",
|
| 163 |
messages=messages,
|
| 164 |
tools=tools
|
| 165 |
)
|
| 166 |
-
|
| 167 |
-
#
|
| 168 |
if response.choices[0].finish_reason == "tool_calls":
|
| 169 |
-
|
| 170 |
-
|
| 171 |
|
| 172 |
-
#
|
| 173 |
-
results = self.handle_tool_call(
|
| 174 |
|
| 175 |
-
#
|
| 176 |
-
messages.append(
|
| 177 |
messages.extend(results)
|
| 178 |
-
|
| 179 |
-
#
|
| 180 |
-
|
| 181 |
model="gemini-2.5-flash-preview-05-20",
|
| 182 |
-
messages=messages
|
| 183 |
)
|
| 184 |
-
return
|
| 185 |
else:
|
| 186 |
-
# If no tool call, return the direct response
|
| 187 |
return response.choices[0].message.content
|
| 188 |
|
| 189 |
|
|
|
|
| 6 |
import gradio as gr
|
| 7 |
import csv
|
| 8 |
|
|
|
|
|
|
|
| 9 |
# Load environment variables
|
| 10 |
load_dotenv(override=True)
|
| 11 |
|
| 12 |
GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai/"
|
| 13 |
+
google_api_key = os.getenv("GOOGLE_API_KEY")  # SECURITY(review): a real API key was committed here as the getenv fallback — that key is now public and must be revoked/rotated in the Google Cloud console; never hardcode secrets, rely on the environment variable only
|
| 14 |
gemini = OpenAI(base_url=GEMINI_BASE_URL, api_key=google_api_key)
|
| 15 |
|
| 16 |
# CSV files for logging
|
|
|
|
| 148 |
|
| 149 |
# Main chat function
|
| 150 |
def chat(self, message, history):
|
| 151 |
+
messages = [{"role": "system", "content": self.system_prompt()}] + history + [{"role": "user", "content": message}]
|
| 152 |
+
done = False  # NOTE(review): unused — never read anywhere; leftover from a planned multi-round tool-call loop (see the commented-out `while` just below) and safe to delete
|
| 153 |
+
#while not done:  # NOTE(review): dead scaffolding — either implement the multi-round tool-call loop (models can request tools again after seeing results) or delete this along with the `done` flag above
|
| 154 |
+
# Send the first message
|
|
|
|
|
|
|
|
|
|
|
|
|
| 155 |
response = self.openai.chat.completions.create(
|
| 156 |
model="gemini-2.5-flash-preview-05-20",
|
| 157 |
messages=messages,
|
| 158 |
tools=tools
|
| 159 |
)
|
| 160 |
+
|
| 161 |
+
# Check if the response contains tool calls
|
| 162 |
if response.choices[0].finish_reason == "tool_calls":
|
| 163 |
+
message_to_add = response.choices[0].message
|
| 164 |
+
tool_calls_to_execute = message_to_add.tool_calls
|
| 165 |
|
| 166 |
+
# Execute the tool and get the result.
|
| 167 |
+
results = self.handle_tool_call(tool_calls_to_execute)
|
| 168 |
|
| 169 |
+
# Append the tool call and its output to the messages list.
|
| 170 |
+
messages.append(message_to_add)
|
| 171 |
messages.extend(results)
|
| 172 |
+
|
| 173 |
+
# Now call the API again with the tool output
|
| 174 |
+
response = self.openai.chat.completions.create(
|
| 175 |
model="gemini-2.5-flash-preview-05-20",
|
| 176 |
+
messages=messages,
|
| 177 |
)
|
| 178 |
+
return response.choices[0].message.content
|
| 179 |
else:
|
| 180 |
+
# If no tool call, return the direct response
|
| 181 |
return response.choices[0].message.content
|
| 182 |
|
| 183 |
|