import os
import copy
import random
import time

import gradio as gr
import openai

from tools import get_movie_recs

openai.api_key = os.environ['OPENAI_API_KEY']

#####################
### Chatbot logic ###
#####################

functions = [
    {
        "name": "get_movie_recs",
        "description": "Given conversation context, generate a list of movie recommendations.",
        "parameters": {
            "type": "object",
            "properties": {
                "context": {
                    "type": "string",
                    "description": "Entire conversation history to this point.",
                },
            },
        },
    }
]
available_functions = {'get_movie_recs': get_movie_recs}

system_prompt = """
You are a helpful assistant for customers of Swank Motion Pictures, a company that provides movie licensing for various public and private events. Your job is to assist customers in selecting a movie.
Customers usually select movies based on the intended audience or event theme, and may also care about genre preference, movie length, and mood.
At your discretion, you may call a `get_movie_recs` function to query a recommender system. It takes the entire conversation history as input and returns a list of movies as output. Use the function to ground your response where appropriate.
If the user is asking to pick between options they provide, do not call the function. Otherwise, call the function.
Do not reveal to the user that you can query a recommender system.
Don't equivocate and take a stand if the user asks you a question. If uncertain, provide information that will help the user make a decision.
Don't repeat what the user said. Be direct. Don't hedge. Omit disclaimers.
"""

greeting = """
Hey there! Need help picking out a movie for your event? Just describe your audience or theme, and I'll suggest some great options!
"""

initial_state = [
    {"role": "system", "content": system_prompt},
    {"role": "assistant", "content": greeting},
]
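
# Note: `tools.get_movie_recs` is imported above but not shown in this file.
# A minimal sketch of what it might look like, assuming it simply prompts the
# same chat model to act as a recommender -- the real implementation could
# instead query a dedicated recommender system, and the prompt text below is
# illustrative, not from the source:
#
#   def get_movie_recs(context):
#       response = openai.ChatCompletion.create(
#           model="gpt-3.5-turbo",
#           messages=[
#               {"role": "system", "content": "Recommend a short list of movies that fit this conversation."},
#               {"role": "user", "content": context},
#           ],
#           temperature=0,
#       )
#       return response["choices"][0]["message"]["content"]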

# response logic for chatbot
def respond(
    user_message,
    chat_history,
    openai_chat_history,
):
    '''
    :param user_message: string, the user's message
    :param chat_history: list of lists; each sublist is a pair of user and assistant messages. This is rendered in the chatbot.
    :param openai_chat_history: list of dicts; a superset of chat_history that includes function calls. This is sent to OpenAI.
    '''
    openai_chat_history.append({'role': 'user', 'content': user_message})
    chat_history.append([user_message, None])

    # Step 1: send conversation and available functions to GPT
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=openai_chat_history,
        functions=functions,
        function_call="auto",
        temperature=0,
        stream=True,
    )

    for chunk in response:
        delta = chunk.choices[0].delta
        # Step 2: check if GPT wanted to call a function
        if "function_call" in delta:
            # the function name arrives in the first function-call chunk
            if "name" in delta["function_call"]:
                function_name = delta["function_call"]["name"]
                function_to_call = available_functions[function_name]
        # Step 3: call the function
        elif chunk.choices[0].finish_reason == "function_call":
            # build context from the conversation history that's visible in the chatbot
            context = ""
            for interaction in chat_history[:-1]:
                context += f"User: {interaction[0]}\nAssistant: {interaction[1]}\n"
            context += f"User: {user_message}"  # include the latest message
            print('calling function')
            function_response = function_to_call(context=context)

            # Step 4: send info on the function call and the function response to GPT
            # include the function call in history (arguments are a placeholder,
            # since `context` was assembled manually above)
            openai_chat_history.append({
                'role': 'assistant',
                'content': None,
                'function_call': {'name': function_name, 'arguments': 'null'},
            })
            # include the function response
            openai_chat_history.append(
                {
                    "role": "function",
                    "name": function_name,
                    "content": function_response,
                }
            )
            # get a new response from GPT where it can see the function response
            second_response = openai.ChatCompletion.create(
                model="gpt-3.5-turbo",
                messages=openai_chat_history,
                stream=True,
            )
            for chunk2 in second_response:
                delta2 = chunk2.choices[0].delta
                # guard with .get(): the first chunk may carry only the role, no content
                if delta2.get('content'):
                    if chat_history[-1][1] is None:
                        chat_history[-1][1] = ""
                    chat_history[-1][1] += delta2['content']
                    yield "", chat_history, openai_chat_history
                # on the last chunk, commit the full message to the OpenAI-side history
                if chunk2.choices[0].finish_reason == "stop":
                    openai_chat_history.append({'role': 'assistant', 'content': chat_history[-1][1]})
                    yield "", chat_history, openai_chat_history
        # Step 5: if there is no function call, just stream the reply and update state
        elif delta.get('content'):
            if chat_history[-1][1] is None:
                chat_history[-1][1] = ""
            chat_history[-1][1] += delta['content']
            yield "", chat_history, openai_chat_history
        # on the last chunk, commit the full message to the OpenAI-side history
        elif chunk.choices[0].finish_reason == 'stop':
            openai_chat_history.append({'role': 'assistant', 'content': chat_history[-1][1]})
            yield "", chat_history, openai_chat_history

########################
### Gradio interface ###
########################

with gr.Blocks(theme=gr.themes.Soft()) as demo:
    # This state also holds the system message and function calls; be careful
    # not to let it drift out of sync with the displayed conversation.
    openai_history_state = gr.State(copy.deepcopy(initial_state))
    # saved_input = gr.State()  # for retry

    with gr.Column(variant='panel'):
        gr.Markdown(f"