import gradio as gr
from openai import OpenAI


def predict(message, history, character, api_key, progress=gr.Progress()):
    client = OpenAI(api_key=api_key)

    # Convert Gradio's (user, assistant) history pairs into OpenAI's message format.
    history_openai_format = []
    for human, assistant in history:
        history_openai_format.append({"role": "user", "content": human})
        history_openai_format.append({"role": "assistant", "content": assistant})
    history_openai_format.append({"role": "user", "content": message})

    response = client.chat.completions.create(
        model='gpt-4',
        messages=history_openai_format,
        temperature=1.0,
        stream=True
    )

    # Accumulate streamed chunks and yield the growing reply so the UI updates live.
    partial_message = ""
    for chunk in progress.tqdm(response, desc="Generating"):
        if chunk.choices[0].delta.content:
            partial_message += chunk.choices[0].delta.content
            yield partial_message


def reset(character):
    # Clear the conversation (chat display and stored history) when the character changes.
    return [], []


# Gradio app
with gr.Blocks() as demo:
    gr.Markdown(f"
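# The Blocks layout above is cut off, so the wiring below is only a sketch of one
# way predict() and reset() could be hooked up, not the original interface.
# Component names (character, api_key, chatbot, history_state, msg), the dropdown
# choices, and the placeholder title are all illustrative assumptions.
with gr.Blocks() as sketch_demo:
    gr.Markdown("# Character chat")  # placeholder title; the original text is truncated
    character = gr.Dropdown(["Pirate", "Wizard"], value="Pirate", label="Character")
    api_key = gr.Textbox(label="OpenAI API key", type="password")
    chatbot = gr.Chatbot()
    history_state = gr.State([])  # list of (user, assistant) pairs fed to predict()
    msg = gr.Textbox(label="Your message")

    def respond(message, history, character, api_key):
        # Wrap predict() so each streamed partial reply updates the last chat row.
        # (The gr.Progress bar inside predict() may not render when it is called
        # indirectly like this, since Gradio only tracks injected Progress objects.)
        for partial in predict(message, history, character, api_key):
            updated = history + [(message, partial)]
            yield "", updated, updated

    msg.submit(respond, [msg, history_state, character, api_key],
               [msg, chatbot, history_state])
    # reset() returns two empty lists, so its outputs are assumed to be the visible
    # chat and the stored history: both are cleared when the character changes.
    character.change(reset, [character], [chatbot, history_state])

# Streaming (generator) handlers need the queue enabled.
sketch_demo.queue().launch()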