ryanrwatkins committed
Commit 62842f4
1 Parent(s): b890649

Update app.py

Files changed (1)
  1. app.py +11 -9
app.py CHANGED
@@ -29,7 +29,7 @@ prompt_templates = {"All Needs Experts": "Respond as if you are combination of a
 actor_description = {"All Needs Experts": "<div style='float: left;margin: 0px 5px 0px 5px;'><img src='https://na.weshareresearch.com/wp-content/uploads/2023/04/experts2.jpg' alt='needs expert image' style='width:70px;align:top;'></div>A combiation of all needs assessment experts."}
 
 def get_empty_state():
- return {"total_tokens": 0, "messages": []}
+ return { "messages": []}
 
 
 def download_prompt_templates():
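The dict returned by get_empty_state seeds the per-session state, so dropping the "total_tokens" key only works because every later read of state['total_tokens'] is removed or commented out in the hunks below. The line that actually creates the state component is not part of this diff; a minimal sketch of how such a dict is typically attached to a Gradio session (hypothetical wiring, not code from this commit):

```python
import gradio as gr

def get_empty_state():
    # After this commit the session state tracks only the chat history.
    return {"messages": []}

with gr.Blocks() as demo:
    # gr.State keeps one copy of this dict per browser session and passes it to
    # every event handler that lists it among its inputs.
    state = gr.State(get_empty_state())
```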
@@ -74,7 +74,7 @@ def submit_message(prompt, prompt_template, temperature, max_tokens, context_len
 history = state['messages']
 
 if not prompt:
- return gr.update(value=''), [(history[i]['content'], history[i+1]['content']) for i in range(0, len(history)-1, 2)], f"Total tokens used: {state['total_tokens']}", state
+ return gr.update(value=''), [(history[i]['content'], history[i+1]['content']) for i in range(0, len(history)-1, 2)], state
 
 prompt_template = prompt_templates[prompt_template]
 
@@ -105,7 +105,9 @@ def submit_message(prompt, prompt_template, temperature, max_tokens, context_len
 #state.append(completion.copy())
 
 completion = { "content": completion }
- state['total_tokens'] += completion['usage']['total_tokens']
+
+
+ #state['total_tokens'] += completion['usage']['total_tokens']
 
 
 
@@ -117,10 +119,10 @@ def submit_message(prompt, prompt_template, temperature, max_tokens, context_len
 # }
 # history.append(error.copy())
 
- total_tokens_used_msg = f"Total tokens used: {state['total_tokens']}"
+ #total_tokens_used_msg = f"Total tokens used: {state['total_tokens']}"
 
 chat_messages = [(prompt_msg['content'], completion['content'])]
- return '', chat_messages, total_tokens_used_msg, state
+ return '', chat_messages, state # total_tokens_used_msg,
 
 
 def clear_conversation():
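Because the wiring hunks below shrink every outputs list from four components to three, clear_conversation (whose body is untouched by this diff) must also return exactly three values, one for each of input_message, chatbot, and state. A hypothetical body consistent with the new wiring:

```python
def clear_conversation():
    # One value per output component: reset the textbox, empty the chatbot,
    # and start a fresh session state. Illustrative only; the real body is not shown here.
    return gr.update(value=None, visible=True), None, get_empty_state()
```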
@@ -158,7 +160,7 @@ with gr.Blocks(css=css) as demo:
 input_message = gr.Textbox(show_label=False, placeholder="Enter your needs assessment question", visible=True).style(container=False)
 
 btn_submit = gr.Button("Submit")
- total_tokens_str = gr.Markdown(elem_id="total_tokens_str")
+ #total_tokens_str = gr.Markdown(elem_id="total_tokens_str")
 btn_clear_conversation = gr.Button("Start New Conversation")
 with gr.Column():
 prompt_template = gr.Dropdown(label="Choose an Expert:", choices=list(prompt_templates.keys()))
@@ -169,9 +171,9 @@ with gr.Blocks(css=css) as demo:
 context_length = gr.Slider(minimum=1, maximum=5, value=2, step=1, label="Context Length", info="Number of previous questions you have asked.")
 
 
- btn_submit.click(submit_message, [ input_message, prompt_template, temperature, max_tokens, context_length, state], [input_message, chatbot, total_tokens_str, state])
- input_message.submit(submit_message, [ input_message, prompt_template, temperature, max_tokens, context_length, state], [input_message, chatbot, total_tokens_str, state])
- btn_clear_conversation.click(clear_conversation, [], [input_message, chatbot, total_tokens_str, state])
+ btn_submit.click(submit_message, [ input_message, prompt_template, temperature, max_tokens, context_length, state], [input_message, chatbot, state])
+ input_message.submit(submit_message, [ input_message, prompt_template, temperature, max_tokens, context_length, state], [input_message, chatbot, state])
+ btn_clear_conversation.click(clear_conversation, [], [input_message, chatbot, state])
 prompt_template.change(on_prompt_template_change_description, inputs=[prompt_template], outputs=[prompt_template_preview])
 
 
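Gradio assigns a handler's return values positionally to the components in its outputs list, so removing total_tokens_str here is what lets the three-value return from submit_message line up. A self-contained sketch of the same three-output pattern with generic handler logic (Gradio 3.x style, matching the .style(container=False) call used elsewhere in this file):

```python
import gradio as gr

def respond(message, state):
    # Append the exchange to the session history and return one value per output component.
    state["messages"].append(message)
    chat_pairs = [(message, "echo: " + message)]
    return "", chat_pairs, state  # maps onto [input_message, chatbot, state]

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    input_message = gr.Textbox(show_label=False)
    btn_submit = gr.Button("Submit")
    state = gr.State({"messages": []})

    btn_submit.click(respond, [input_message, state], [input_message, chatbot, state])
    input_message.submit(respond, [input_message, state], [input_message, chatbot, state])
```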
 