import gradio as gr
import os
import time
import google.generativeai as genai
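# Configure the Gemini client with the API key stored in the "palm_key" environment variable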
genai.configure(api_key=os.environ["palm_key"])
# Create the model
# See https://ai.google.dev/api/python/google/generativeai/GenerativeModel
generation_config = {
    "temperature": 1,
    "top_p": 0.95,
    "top_k": 64,
    "max_output_tokens": 8192,
    "response_mime_type": "text/plain",
}
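# Block content rated medium severity or above in each harm category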
safety_settings = [
    {
        "category": "HARM_CATEGORY_HARASSMENT",
        "threshold": "BLOCK_MEDIUM_AND_ABOVE",
    },
    {
        "category": "HARM_CATEGORY_HATE_SPEECH",
        "threshold": "BLOCK_MEDIUM_AND_ABOVE",
    },
    {
        "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT",
        "threshold": "BLOCK_MEDIUM_AND_ABOVE",
    },
    {
        "category": "HARM_CATEGORY_DANGEROUS_CONTENT",
        "threshold": "BLOCK_MEDIUM_AND_ABOVE",
    },
]
model = genai.GenerativeModel(
    model_name="gemini-1.5-flash-latest",
    safety_settings=safety_settings,
    generation_config=generation_config,
)
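# Start a chat session with an empty history; the context and example dialogues are sent as the first message below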
chat_session = model.start_chat(
    history=[]
)
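# Context prompt defining the bot's role, menu and prices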
context = "You are an order bot with only 3 types of pizzas: Margherita, Cheesy-Bacon and Vegetarian. Prices are small (9$), medium (11$) and large (13$). You can offer 2 types of drinks coke and mountain dew for $1 each."
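# Few-shot example exchanges (user message, expected bot reply) used to prime the model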
examples = [
    [
        "Hi, I want to order pizzas.",
        "Hello, we have 3 types of pizzas: Margherita, Cheesy-Bacon and Vegetarian. Prices are small (9$), medium (11$) and large (13$)."
    ],
    [
        "I want a large margherita and a medium cheesy-bacon.",
        "Do you want any drinks? We have coke (3$) or Mountain dew (5$)."
    ],
    [
        "Yes 2 cokes.",
        "Total will be 19$. Please enter your address and phone number"
    ],
    [
        "Paris eiffel tower, 0653145863",
        "Perfect ! Do you need anything else?"
    ],
    [
        "No thanks.",
        "Have a nice day!\nHere is the sum up: \n{\npizza_type_1: \"margherita\",\npizza_type_2: \"cheesy-bacon\",\npizza_size_1: \"L\",\npizza_size_2: \"M\",\ndrink_1: \"coke\",\ndrink_2: \"coke\",\ntotal_price: \"19\",\naddress: \"Paris, Eiffel Tower\",\nphone_number: \"0653145863\"\n}\nDo you want to change anything?"
    ],
    [
        "Yes, i'd like to change margherita size to M.",
        "Have a nice day!\nHere is the sum up: \n{\npizza_type_1: \"margherita\",\npizza_type_2: \"cheesy-bacon\",\npizza_size_1: \"M\",\npizza_size_2: \"M\",\ndrink_1: \"coke\",\ndrink_2: \"coke\",\ntotal_price: \"19\",\naddress: \"Paris, Eiffel Tower\",\nphone_number: \"0653145863\"\n}\nDo you want to change anything?"
    ],
    [
        "No thanks",
        "Have a nice day!"
    ]
]
history = [['']]
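# Prime the chat session with the context and the example dialogues in a single message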
response = chat_session.send_message(context + "\n" + "Here are some examples for your interactions: \n" + ".\n".join(
    ["user: " + pair[0] + ".\nyour response: " + pair[1] for pair in examples]
))
print(response.text)
print(chat_session.history)
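# Gradio UI: chatbot display, message textbox, and Submit/Clear buttons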
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    chatbot = gr.Chatbot()
    msg = gr.Textbox()
    btn = gr.Button("Submit", variant="primary")
    clear = gr.Button("Clear")
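    # Append the user's message to the chat history and clear the textbox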
    def user(user_message, history):
        history.append([user_message, None])
        return gr.update(value=""), history
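    # Send the latest user message to Gemini and stream the reply into the chatbot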
    def bot(history):
        try:
            bot_message = chat_session.send_message(
                history[-1][0]
            )
            # Stream the reply one character at a time
            history[-1][1] = ""
            for character in bot_message.text:
                history[-1][1] += character
                time.sleep(0.005)
                yield history
        except Exception as e:
            # Log the error and show a fallback message to the user
            print("Error occurred:", str(e))
            history[-1][1] = "Incorrect input, please retry with a longer sentence in English"
            yield history
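    # Wire the textbox and Submit button: add the user message, then generate the bot reply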
    response = msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, chatbot, chatbot
    )
    response = btn.click(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, chatbot, chatbot
    )
    response.then(lambda: gr.update(interactive=True), None, [msg], queue=False)
    clear.click(lambda: None, None, chatbot, queue=False)
demo.queue()
demo.launch()