akhaliq HF staff committed on
Commit
c59c85b
·
verified ·
1 Parent(s): a54087d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +49 -31
app.py CHANGED
@@ -1,40 +1,58 @@
1
- import gradio as gr
2
  import google.generativeai as genai
 
3
 
4
- # Set your API key
5
- genai.configure(api_key='YOUR_API_KEY')
6
 
7
- # Initialize the model
8
- model = genai.GenerativeModel('gemini-1.5-flash')
 
 
 
 
 
 
9
 
10
- def respond(user_message, history, chat_state):
11
- if chat_state is None:
12
- # Start a new chat with an initial greeting
13
- chat_state = model.start_chat(
14
- messages=[
15
- {"author": "user", "content": "Hello"},
16
- {"author": "assistant", "content": "Great to meet you. What would you like to know?"},
17
- ]
18
- )
19
- # Initialize history if it's empty
20
- if not history:
21
- history = [["Hello", "Great to meet you. What would you like to know?"]]
22
- else:
23
- # Continue the conversation
24
- pass # No need to manually append messages; send_message handles it
25
 
26
- # Send the user's message to the model
27
- response = chat_state.send_message(user_message)
28
- # Append the user's message and model's response to the history
29
- history.append([user_message, response.text])
30
- return history, chat_state, ''
 
 
 
 
 
 
 
 
 
 
 
31
 
 
32
  with gr.Blocks() as demo:
33
- gr.Markdown("<h1 align='center'>Gemini 1.5 Flash Chatbot Demo</h1>")
34
- chatbot = gr.Chatbot([["Hello", "Great to meet you. What would you like to know?"]])
35
- msg = gr.Textbox(placeholder="Type your message here...", show_label=False)
36
- state = gr.State() # To store the chat_state object
37
-
38
- msg.submit(respond, [msg, chatbot, state], [chatbot, state, msg])
 
 
 
 
 
 
 
 
39
 
40
  demo.launch()
 
1
import os
import google.generativeai as genai
import gradio as gr

# Configure the Gemini client from the environment.
# Raises KeyError at startup if GEMINI_API_KEY is unset (fail fast).
genai.configure(api_key=os.environ["GEMINI_API_KEY"])

# Generation parameters passed to every request made through `model`.
generation_config = {
    "temperature": 1,
    "top_p": 0.95,
    "top_k": 64,
    "max_output_tokens": 8192,
    "response_mime_type": "text/plain",
}

# Shared model handle used by the chat function below.
model = genai.GenerativeModel(
    model_name="gemini-1.5-flash",
    generation_config=generation_config,
    # safety_settings can be adjusted
    # See https://ai.google.dev/gemini-api/docs/safety-settings
)
 
 
 
 
 
 
 
 
 
23
 
24
# Function to handle chat
def chat_with_model(user_input, history):
    """Send *user_input* to the Gemini model and update the running history.

    Args:
        user_input: The user's new message text.
        history: Mutable list of prior turns in google.generativeai format,
            i.e. dicts like ``{"role": "user" | "model", "parts": [text]}``.

    Returns:
        A ``(messages, history)`` pair: ``messages`` is a list of
        ``(user_text, model_text)`` tuples for display in ``gr.Chatbot``,
        and ``history`` is the updated turn list stored back in ``gr.State``.
    """
    # Start the chat from the *prior* turns only: send_message() appends the
    # new user message itself, so pre-appending it to history (as the old
    # code did) would send the message to the model twice.
    chat_session = model.start_chat(history=history)
    response = chat_session.send_message(user_input)
    # Record the completed exchange. The genai API expects role "model"
    # (not "assistant") and the text under the "parts" key (not "content").
    history.append({"role": "user", "parts": [user_input]})
    history.append({"role": "model", "parts": [response.text]})
    # Pair up (user, model) turns for the Gradio Chatbot widget.
    messages = []
    for i in range(0, len(history), 2):
        user_msg = history[i]["parts"][0]
        model_msg = history[i + 1]["parts"][0] if i + 1 < len(history) else ""
        messages.append((user_msg, model_msg))
    return messages, history
40
 
41
# Create Gradio app
with gr.Blocks() as demo:
    gr.Markdown("# Chat with Gemini Model")
    chatbot = gr.Chatbot()
    state = gr.State([])  # running genai-format conversation history
    with gr.Row():
        # NOTE: the previous `.style(container=False)` call was removed —
        # Gradio 4.x dropped the `.style()` API (it raises AttributeError);
        # `container` is a Textbox constructor argument now.
        user_input = gr.Textbox(
            show_label=False,
            placeholder="Type your message and press Enter",
            container=False,
        )
        send_btn = gr.Button("Send")
    # Both the button and Enter in the textbox trigger the same handler.
    send_btn.click(chat_with_model, [user_input, state], [chatbot, state])
    user_input.submit(chat_with_model, [user_input, state], [chatbot, state])
    # Clear the textbox after the message is sent.
    send_btn.click(lambda: "", None, user_input)
    user_input.submit(lambda: "", None, user_input)

demo.launch()