Spaces:
Running
Running
Update app.py
Browse files
app.py
CHANGED
@@ -6,32 +6,40 @@ from langchain_google_genai.chat_models import ChatGoogleGenerativeAI
|
|
6 |
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "./firm-catalyst-437006-s4-407500537db5.json"
|
7 |
|
8 |
# Chat function
|
9 |
-
def chat_with_gemini(user_input, chat_history=
|
10 |
try:
|
11 |
-
# Append user input to the chat history
|
12 |
-
chat_history
|
13 |
-
|
14 |
-
# Get response from the model
|
15 |
-
response = llm.predict(user_input)
|
16 |
|
17 |
-
# Append the response to the chat history
|
18 |
-
chat_history
|
19 |
|
20 |
# Return the updated chat history
|
21 |
return chat_history
|
22 |
except Exception as e:
|
23 |
-
return
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
24 |
|
25 |
-
#
|
26 |
-
|
27 |
-
|
28 |
-
inputs=["text", "text"], # "text" input for user message and chat history
|
29 |
-
outputs="text", # Single "text" output for the chat history
|
30 |
-
title="Chatbot with Gemini 1.5",
|
31 |
-
description="Ask me anything!",
|
32 |
-
theme="default",
|
33 |
-
live=True
|
34 |
-
)
|
35 |
|
36 |
# Launch the interface with debugging enabled
|
37 |
iface.launch(debug=True)
|
|
|
6 |
# Point Google's auth libraries at the local service-account key file.
# NOTE(review): hard-coded credential filename shipped alongside the app —
# prefer reading this path from an environment variable or secret store,
# and make sure the key file is not committed to version control.
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "./firm-catalyst-437006-s4-407500537db5.json"
|
7 |
|
8 |
# Chat function
|
9 |
+
def chat_with_gemini(user_input, chat_history=None):
    """Send *user_input* to the Gemini model and record the exchange.

    Args:
        user_input: The user's message text.
        chat_history: Optional list of (role, text) tuples to append to.
            A fresh list is created when omitted.

    Returns:
        The updated chat history: ends with a ("Bot", reply) entry on
        success, or a ("Bot", "Error: ...") entry if the model call fails.
    """
    # Bug fix: the original used a mutable default (chat_history=[]),
    # which Python evaluates once — every no-argument call would share and
    # grow the SAME list. Create a per-call list instead.
    if chat_history is None:
        chat_history = []
    try:
        # Append the user's message to the chat history.
        chat_history.append(("User", user_input))

        # Get a response from the model.
        # NOTE(review): `predict` is the legacy LangChain entry point;
        # `invoke` is the modern equivalent — confirm against the pinned
        # langchain version before switching.
        response = llm.predict(user_input)

        # Append the bot's response to the chat history.
        chat_history.append(("Bot", response))
        return chat_history
    except Exception as e:
        # Surface the failure inside the conversation instead of crashing
        # the UI (llm lives at module level, so errors here are expected
        # to include auth/network failures).
        chat_history.append(("Bot", f"Error: {str(e)}"))
        return chat_history
|
26 |
+
|
27 |
+
# Create a Gradio interface (Blocks layout: chat window + input box + state).
with gr.Blocks() as iface:
    gr.Markdown("# Chatbot with Gemini 1.5")
    gr.Markdown("Ask me anything!")

    chatbot = gr.Chatbot()
    msg = gr.Textbox(label="Type your message here...")
    state = gr.State([])  # Per-session chat history of (role, text) tuples.

    def send_message(user_input, chat_history):
        """Run one chat turn and fan the result out to both outputs.

        Bug fix: `msg.submit` below declares TWO outputs ([chatbot, state]),
        but the original returned a single list, which Gradio cannot unpack.
        Also, gr.Chatbot renders (user_msg, bot_msg) pairs, while the history
        stores alternating ("User", text) / ("Bot", text) entries — convert
        before handing it to the chatbot component.
        """
        history = chat_with_gemini(user_input, chat_history)
        pairs = [
            (history[i][1], history[i + 1][1])
            for i in range(0, len(history) - 1, 2)
        ]
        return pairs, history

    # On submit: update chatbot + state, then clear the textbox.
    msg.submit(send_message, [msg, state], [chatbot, state])
    msg.submit(lambda: "", None, msg)  # Clear input box after submission
|
|
|
|
|
|
|
|
|
|
|
|
|
|
43 |
|
44 |
# Launch the interface with debugging enabled (prints verbose errors and
# tracebacks to the console — disable for production deployments).
iface.launch(debug=True)
|