leek2 committed on
Commit
3009d0e
1 Parent(s): c7ca435

first commit

Files changed (4)
  1. .gitignore +15 -0
  2. app.py +199 -0
  3. requirements.txt +2 -0
  4. tools.py +20 -0
.gitignore ADDED
@@ -0,0 +1,15 @@
+__pycache__
+env
+.env
+.DS_Store
+.vscode
+*.swp
+init.sh
+*ignore*
+!.gitignore
+!.slugignore
+!.dockerignore
+.ipynb_checkpoints/
+.coverage
+htmlcov
+build
app.py ADDED
@@ -0,0 +1,199 @@
+import os
+import copy
+import random, time
+
+import gradio as gr
+import openai
+
+from tools import get_movie_recs
+
+openai.api_key = os.environ['OPENAI_API_KEY']
+
+#####################
+### Chatbot logic ###
+#####################
+
+functions = [
+    {
+        "name": "get_movie_recs",
+        "description": "Given conversation context, generate a list of movie recommendations.",
+        "parameters": {
+            "type": "object",
+            "properties": {
+                "context": {
+                    "type": "string",
+                    "description": "Entire conversation history to this point.",
+                },
+            },
+        },
+    }
+]
+
+available_functions = {'get_movie_recs': get_movie_recs}
+
+system_prompt = """
+You are a helpful assistant for customers of Swank Motion Pictures, a company that provides movie licensing
+for various public and private events. Your job is to assist customers in selecting a movie. Customers usually
+select movies based on the intended audience or event theme, and may also care about genre preference, movie length,
+and mood. At your discretion, you may call a `get_movie_recs` function to query a recommender system.
+It takes the entire conversation history as input and returns a list of movies as output.
+Use the function to ground your response where appropriate.
+If the user is asking to pick between options they provide, do not call the function. Otherwise, call the function.
+Do not reveal to the user that you can query a recommender system.
+Don't equivocate; take a stand if the user asks you a question.
+If uncertain, provide information that will help the user make a decision. Don't repeat what the user said.
+Be direct. Don't hedge. Omit disclaimers.
+"""
+
+greeting = """
+Hey there! Need help picking out a movie for your event? Just describe your audience or theme,
+and I'll suggest some great options!
+"""
+
+initial_state = [
+    {"role": "system", "content": system_prompt},
+    {"role": "assistant", "content": greeting},
+]
+
+# response logic for chatbot
+def respond(
+    user_message,
+    chat_history,
+    openai_chat_history,
+):
+    '''
+    :param user_message: string, the user's message
+    :param chat_history: list of lists, each sublist is a pair of user and assistant messages. This is rendered in the chatbot.
+    :param openai_chat_history: list of dicts, superset of chat_history that includes function calls. This is sent to OpenAI.
+    '''
+    openai_chat_history.append({'role': 'user', 'content': user_message})
+    chat_history.append([user_message, None])
+
+    # Step 1: send conversation and available functions to GPT
+    response = openai.ChatCompletion.create(
+        model="gpt-3.5-turbo",
+        messages=openai_chat_history,
+        functions=functions,
+        function_call="auto",
+        temperature=0,
+        stream=True,
+    )
+    for chunk in response:
+        delta = chunk.choices[0].delta
+
+        # Step 2: check if GPT wanted to call a function
+        if "function_call" in delta:
+            if "name" in delta.function_call:
+                function_name = delta["function_call"]["name"]
+                function_to_call = available_functions[function_name]
+
+        # Step 3: call the function
+        elif chunk.choices[0].finish_reason == "function_call":
+            # send conversation history that's visible in the chatbot
+            context = ""
+            for interaction in chat_history[:-1]:
+                context += f"User: {interaction[0]}\nAssistant: {interaction[1]}\n"
+            context += f"User: {user_message}"  # include the latest message
+            print('calling function')
+            function_response = function_to_call(context=context)
+
+            # Step 4: send the info on the function call and function response to GPT
+            # include function call in history
+            openai_chat_history.append({
+                'role': 'assistant',
+                'content': None,
+                'function_call': {'name': function_name, 'arguments': 'null'},
+            })
+            # include function response
+            openai_chat_history.append(
+                {
+                    "role": "function",
+                    "name": function_name,
+                    "content": function_response,
+                }
+            )
+            # get a new response from GPT where it can see the function response
+            second_response = openai.ChatCompletion.create(
+                model="gpt-3.5-turbo",
+                messages=openai_chat_history,
+                stream=True,
+            )
+            for chunk2 in second_response:
+                if len(chunk2['choices'][0]['delta']) != 0:
+                    if chat_history[-1][1] is None: chat_history[-1][1] = ""
+                    chat_history[-1][1] += chunk2['choices'][0]['delta']['content']
+                    yield "", chat_history, openai_chat_history
+                # if last chunk, update openai_chat_history with full message
+                if chunk2.choices[0].finish_reason == "stop":
+                    openai_chat_history.append({'role': 'assistant', 'content': chat_history[-1][1]})
+                    yield "", chat_history, openai_chat_history
+
+        # Step 5: If no function call, just return updated state variables
+        elif 'function_call' not in delta and len(delta) != 0:
+            if chat_history[-1][1] is None: chat_history[-1][1] = ""
+            chat_history[-1][1] += delta['content']
+            yield "", chat_history, openai_chat_history
+        # if last chunk, update openai_chat_history with full message
+        elif chunk.choices[0].finish_reason == 'stop':
+            openai_chat_history.append({'role': 'assistant', 'content': chat_history[-1][1]})
+            yield "", chat_history, openai_chat_history
+
+
+########################
+### Gradio interface ###
+########################
+
+with gr.Blocks(theme=gr.themes.Soft()) as demo:
+    # This state variable also includes function calls and system message. Be careful with getting out of sync with the displayed conversation.
+    openai_history_state = gr.State(copy.deepcopy(initial_state))
+    # saved_input = gr.State()  # for retry
+
+    with gr.Column(variant='panel'):
+        gr.Markdown(f"<h3 style='text-align: center; margin-bottom: 1rem'>{greeting}</h3>")
+        chatbot = gr.Chatbot()
+
+        with gr.Group():
+            # Input + submit buttons
+            with gr.Row():
+                input_box = gr.Textbox(
+                    container=False,
+                    show_label=False,
+                    label='Message',
+                    placeholder='Type a message...',
+                    scale=7,
+                    autofocus=True,
+                )
+                submit_btn = gr.Button('Submit', variant='primary', scale=1, min_width=150,)
+
+        # retry + clear buttons
+        with gr.Row():
+            retry_btn = gr.Button('Retry', variant='secondary',)
+            clear_btn = gr.Button('Clear', variant='secondary')
+
+    # example inputs
+    gr.Examples(
+        [
+            'Please recommend some movies with lots of jumpscares or something with lots of blood.. I want to watch some movie that will not let me nor my cousins sleep soundly tonight.',
+            "Which movie is better? Warrior (2011) or Southpaw (2015)?. I'm looking to watch a boxing movie, and am not sure what to pick between Warrior (2011) or Southpaw (2015). I'm a big fan of both, Jake Gyllenhall and Tom Hardy and honestly just couldn't pick between the two",
+        ],
+        inputs=[input_box],
+    )
+
+    # bind events
+    gr.on(
+        triggers=[input_box.submit, submit_btn.click],
+        fn=respond,
+        inputs=[input_box, chatbot, openai_history_state],
+        outputs=[input_box, chatbot, openai_history_state],
+    )
+
+    clear_btn.click(
+        fn=lambda: ('', [], copy.deepcopy(initial_state),),  # deep copy so the shared initial_state isn't mutated by later turns
+        inputs=None,
+        outputs=[input_box, chatbot, openai_history_state,],
+        queue=False,
+        api_name=False,
+    )
+
+demo.queue()
+demo.launch()
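
For orientation, here is a minimal sketch (not part of the commit) of the message sequence that `respond()` accumulates in `openai_chat_history` for a turn that routes through the recommender: the assistant `function_call` stub and the `function` message are appended before the second `ChatCompletion.create` call, while `chat_history` only ever holds the user/assistant pairs rendered in the chatbot. The user text and movie list below are placeholders.

```python
# Hypothetical contents of openai_chat_history after one function-call turn.
example_history = [
    {"role": "system", "content": "<system_prompt>"},
    {"role": "assistant", "content": "<greeting>"},
    {"role": "user", "content": "Something scary for a college movie night"},
    # appended by respond() once the streamed response finishes with "function_call"
    {"role": "assistant", "content": None,
     "function_call": {"name": "get_movie_recs", "arguments": "null"}},
    # output of get_movie_recs(context=...), fed back to the model
    {"role": "function", "name": "get_movie_recs",
     "content": "1. ...\n2. ...\n3. ...\n4. ...\n5. ..."},
    # streamed second response, appended once finish_reason == "stop"
    {"role": "assistant", "content": "Here are a few options for your group..."},
]
```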
requirements.txt ADDED
@@ -0,0 +1,2 @@
+gradio
+openai<1.0
tools.py ADDED
@@ -0,0 +1,20 @@
+"""
+External functions that the chatbot can use at its discretion.
+"""
+import os
+import openai
+
+openai.api_key = os.environ['OPENAI_API_KEY']
+
+def get_movie_recs(context, K=5):
+    system_prompt = f"""
+    Pretend you are a movie recommendation system. I will give you a conversation between a user
+    and you (a recommender system). Based on the conversation, reply to me with {K} recommendations
+    without extra sentences.
+    """
+    user_query = f"Here is the conversation:\n{context}"
+    response = openai.ChatCompletion.create(
+        model="gpt-3.5-turbo",
+        messages=[{'role': 'system', 'content': system_prompt}, {'role': 'user', 'content': user_query}],
+    )
+    return response.choices[0].message.content
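
`get_movie_recs` can also be exercised on its own, outside the Gradio app. A minimal sketch (not part of the commit, assuming `OPENAI_API_KEY` is set in the environment and the pinned `openai<1.0` package is installed):

```python
# Hypothetical standalone call; the context string mirrors the
# "User: ... / Assistant: ..." transcript format that app.py assembles
# before handing the conversation to the tool.
from tools import get_movie_recs

context = (
    "User: I need something light for a company picnic\n"
    "Assistant: Any genre preferences?\n"
    "User: Family-friendly comedies, please"
)
print(get_movie_recs(context=context, K=3))  # expects three titles, one per line
```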