Spaces:
Sleeping
Sleeping
File size: 1,903 Bytes
e6868fd |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 |
from openai import OpenAI
import gradio as gr
def predict(message, history, api_key):
    """Stream a GPT-4o chat completion for *message* given prior *history*.

    Parameters
    ----------
    message : str
        The new user message to send.
    history : list[tuple[str, str]]
        Prior conversation turns as (user, assistant) pairs.
    api_key : str
        OpenAI API key used to construct the client.

    Yields
    ------
    str
        The assistant reply accumulated so far; grows with each chunk.
    """
    client = OpenAI(api_key=api_key)

    # Flatten (user, assistant) pairs into the OpenAI messages format.
    history_openai_format = []
    for human, assistant in history:
        history_openai_format.append({"role": "user", "content": human})
        history_openai_format.append({"role": "assistant", "content": assistant})
    history_openai_format.append({"role": "user", "content": message})

    response = client.chat.completions.create(
        model='gpt-4o',
        messages=history_openai_format,
        temperature=1.0,
        stream=True,
    )

    partial_message = ""
    for chunk in response:
        # Hoist the attribute chain; some chunks (role-only preamble, the
        # final stop chunk) carry no text and are skipped.
        delta = chunk.choices[0].delta.content
        if delta:
            partial_message += delta
            yield partial_message
def chat_with_api_key(api_key, message, history):
    """Stream a reply via predict() while keeping *history* consistent.

    Bug fix: the original appended a fresh (message, partial) pair to
    *history* on every streamed chunk, so a single reply produced dozens
    of duplicate history entries that were then replayed to the model on
    the next turn. We now append one placeholder entry and update it in
    place as chunks arrive.

    Parameters
    ----------
    api_key : str
        OpenAI API key, forwarded to predict().
    message : str
        The new user message.
    history : list[tuple[str, str]]
        Conversation state; mutated in place to include the new turn.

    Yields
    ------
    tuple[str, list]
        (reply-so-far, updated history) after each streamed chunk.
    """
    # Snapshot the prior turns so the in-progress entry below is not
    # included in the context sent to the model.
    prior_turns = list(history)
    history.append((message, ""))
    for partial_message in predict(message, prior_turns, api_key):
        history[-1] = (message, partial_message)
        yield partial_message, history
with gr.Blocks() as demo:
    # Input row: masked API-key field plus the user's message box.
    with gr.Row():
        api_key = gr.Textbox(label="API Key", placeholder="Enter your API key", type="password")
        message = gr.Textbox(label="Message")
    state = gr.State([])  # conversation history carried across submissions
    output = gr.Textbox(label="Output", lines=10)

    def update_output(api_key, message, state):
        """Relay streamed (reply, history) pairs from chat_with_api_key
        to the output textbox and session state."""
        for response, updated_state in chat_with_api_key(api_key, message, state):
            yield response, updated_state

    btn = gr.Button("Submit")
    btn.click(update_output, inputs=[api_key, message, state], outputs=[output, state])
demo.launch()
|