typesdigital committed on
Update app.py
app.py
CHANGED
@@ -1,95 +1,44 @@
app.py before the commit:

 import os
-import io
 import gradio as gr
 import google.generativeai as genai
-from PIL import Image

 # Configure the Gemini API
-# Retrieve the API key from Hugging Face Spaces secrets
 api_key = os.environ.get("GEMINI_API_KEY")
 if not api_key:
     raise ValueError("GEMINI_API_KEY not found in environment variables. Please set it in Hugging Face Spaces secrets.")

 genai.configure(api_key=api_key)

-def upload_to_gemini(image):
-    """Uploads the given image to Gemini."""
-    if image is None:
-        return None
-    image_byte_array = io.BytesIO()
-    image.save(image_byte_array, format='JPEG')
-    image_byte_array = image_byte_array.getvalue()
-    return genai.upload_file(image_byte_array, mime_type="image/jpeg")
-
 # Create the model
-generation_config = {
-    "temperature": 0.9,
-    "top_p": 0.95,
-    "top_k": 64,
-    "max_output_tokens": 1024,
-    "response_mime_type": "text/plain",
-}

-model = genai.GenerativeModel(
-    generation_config=generation_config,
-)

-def
     history = history or []

     try:
-        if not history:
-            # Start a new chat session
-            chat_session = model.start_chat(
-                history=[
-                    {
-                        "role": "user",
-                        "parts": [
-                            uploaded_image,
-                            user_message,
-                        ] if uploaded_image else [user_message],
-                    },
-                ]
-            )
-        else:
-            # Continue existing chat session
-            chat_session = model.start_chat(history=[
-                {"role": "user" if i % 2 == 0 else "model", "parts": [msg]}
-                for i, (msg, _) in enumerate(history)
-            ])
-
-        # Send the new message
-        if uploaded_image:
-            chat_session.send_message([uploaded_image, user_message])
-        else:
-            chat_session.send_message(user_message)
-
-        # Get the response
-        response = chat_session.last
-        response_text = response.text

-        history.append((user_message, response_text))
     except Exception as e:
         error_message = f"An error occurred: {str(e)}"
-        history.append((user_message, error_message))
-
-    return history, history
-
-def clear_conversation():
-    return None

 # Define the Gradio interface
 with gr.Blocks() as demo:
-    chatbot = gr.Chatbot(label="Chat with Gemini
     msg = gr.Textbox(label="Type your message here")
     clear = gr.Button("Clear")
-    image_upload = gr.Image(type="pil", label="Upload an image (optional)")

-    msg.submit(
-    clear.click(

 # Launch the app
 if __name__ == "__main__":
     demo.launch()
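Aside: the removed upload_to_gemini helper was the hook for sending PIL images to Gemini via genai.upload_file. A minimal standalone sketch of that upload path is shown below, purely as an illustration: it assumes GEMINI_API_KEY is set, uses a hypothetical local file example.jpg, and writes the image to disk first, since upload_file is normally given a file path and a MIME type rather than raw bytes.

import os
import google.generativeai as genai
from PIL import Image

# Assumes GEMINI_API_KEY is set in the environment, as app.py requires
genai.configure(api_key=os.environ["GEMINI_API_KEY"])

# Hypothetical input image, used only for illustration
image = Image.open("example.jpg")

# Persist the PIL image to disk and hand the path to the Files API
image.save("upload.jpg", format="JPEG")
uploaded = genai.upload_file("upload.jpg", mime_type="image/jpeg")

# The returned File handle can then be referenced in prompts
print(uploaded.name, uploaded.uri)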
app.py after the commit:

 import os
 import gradio as gr
 import google.generativeai as genai

 # Configure the Gemini API
 api_key = os.environ.get("GEMINI_API_KEY")
 if not api_key:
     raise ValueError("GEMINI_API_KEY not found in environment variables. Please set it in Hugging Face Spaces secrets.")

 genai.configure(api_key=api_key)

 # Create the model
+model = genai.GenerativeModel('gemini-pro')

+# Initialize chat history
+chat = model.start_chat(history=[])

+def respond(message, history):
     history = history or []

     try:
+        # Send user message and get response
+        response = chat.send_message(message)
+        bot_message = response.text

+        # Append to history and return
+        history.append((message, bot_message))
+        return history, history
     except Exception as e:
         error_message = f"An error occurred: {str(e)}"
+        history.append((message, error_message))
+        return history, history

 # Define the Gradio interface
 with gr.Blocks() as demo:
+    chatbot = gr.Chatbot(label="Chat with Gemini")
     msg = gr.Textbox(label="Type your message here")
     clear = gr.Button("Clear")

+    msg.submit(respond, [msg, chatbot], [chatbot, chatbot])
+    clear.click(lambda: None, outputs=[chatbot])

 # Launch the app
 if __name__ == "__main__":
     demo.launch()
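A minimal way to exercise the same chat flow outside Gradio is sketched below; it is an illustration only, assuming GEMINI_API_KEY is set in the environment (exported locally or as a Spaces secret) and using the same google-generativeai calls as the committed app.py.

import os
import google.generativeai as genai

# Same configuration as app.py; assumes GEMINI_API_KEY is set in the environment
genai.configure(api_key=os.environ["GEMINI_API_KEY"])

model = genai.GenerativeModel('gemini-pro')
chat = model.start_chat(history=[])

# The ChatSession accumulates the conversation and resends it with every call,
# so follow-up messages can refer back to earlier turns
history = []
for message in ["Hello!", "Summarize what I just said."]:
    response = chat.send_message(message)
    history.append((message, response.text))  # the same (user, bot) pairs app.py feeds to gr.Chatbot

for user, bot in history:
    print(f"User: {user}")
    print(f"Gemini: {bot}")

Because chat and its history live at module level, all visitors to the Space share a single conversation, and the Clear button resets only the displayed chatbot, not the underlying Gemini session.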