Create app.py
app.py
ADDED
@@ -0,0 +1,36 @@
import gradio as gr
import google.generativeai as genai

# Set your API key
genai.configure(api_key='YOUR_API_KEY')

# Initialize the model
model = genai.GenerativeModel('gemini-1.5-flash')

def respond(user_message, history, chat_state):
    if chat_state is None:
        # Start a new chat seeded with an initial greeting
        chat_state = model.start_chat(
            history=[
                {"role": "user", "parts": "Hello"},
                {"role": "model", "parts": "Great to meet you. What would you like to know?"},
            ]
        )
        # Initialize the displayed history if it's empty
        if not history:
            history = [["Hello", "Great to meet you. What would you like to know?"]]
    # Send the user's message to the model
    response = chat_state.send_message(user_message)
    # Append the user's message and the model's response to the history
    history.append([user_message, response.text])
    return history, chat_state, ''

with gr.Blocks() as demo:
    gr.Markdown("<h1 align='center'>Gemini 1.5 Flash Chatbot Demo</h1>")
    chatbot = gr.Chatbot([["Hello", "Great to meet you. What would you like to know?"]])
    msg = gr.Textbox(placeholder="Type your message here...", show_label=False)
    state = gr.State()  # Stores the chat_state object between turns

    msg.submit(respond, [msg, chatbot, state], [chatbot, state, msg])

demo.launch()
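A note on the hard-coded key: on a Space it is generally safer to read the API key from an environment variable (for example a repository secret) rather than committing it in app.py. A minimal sketch, assuming a secret named GOOGLE_API_KEY is configured for the Space (the variable name is an assumption, not part of the original file):

import os
import google.generativeai as genai

# GOOGLE_API_KEY is an assumed secret/environment variable name
genai.configure(api_key=os.environ["GOOGLE_API_KEY"])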