ryanrwatkins committed on
Commit 62603a5
1 Parent(s): f9c7c43

Update app.py

Files changed (1)
  app.py  +4 -9
app.py CHANGED
@@ -98,14 +98,14 @@ def submit_message(prompt, prompt_template, temperature, max_tokens, context_len
   chain = load_qa_chain(ChatOpenAI(temperature=temperature, max_tokens=max_tokens, model_name="gpt-3.5-turbo"), chain_type="stuff")
   completion = chain.run(input_documents=docs, question=query)
   completion = { "content": completion }
-  chat_messages = [(prompt_msg['content'], completion['content'])]
+

   get_empty_state()
   state.append(completion.copy())

   state['total_tokens'] += completion['usage']['total_tokens']

-  return '', chat_messages, total_tokens_used_msg, state
+

   except Exception as e:
   history.append(prompt_msg.copy())
@@ -117,13 +117,8 @@ def submit_message(prompt, prompt_template, temperature, max_tokens, context_len

   total_tokens_used_msg = f"Total tokens used: {state['total_tokens']}"

-
-
-
-
-
-
-
+  chat_messages = [(prompt_msg['content'], completion['content'])]
+  return '', chat_messages, total_tokens_used_msg, state


 def clear_conversation():
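
In short, the commit moves the chat_messages assignment and the return statement out of the try block (old lines 101 and 108) to just after total_tokens_used_msg is built (new lines 120-121), and drops the run of blank lines that previously sat there. Below is a minimal, self-contained sketch of the resulting ordering; run_qa_chain, the dict-shaped state, and the history list are simplified stand-ins for the objects app.py actually uses, not names from the repository.

# Sketch of the ordering submit_message follows after this commit (simplified;
# run_qa_chain, state, and history are illustrative stand-ins, not app.py names).
def submit_message_sketch(prompt_msg, state, history, run_qa_chain):
    completion = {"content": ""}
    try:
        # Run the QA chain and wrap its answer in a dict, as app.py does.
        completion = {"content": run_qa_chain(prompt_msg["content"])}
        state["messages"].append(completion.copy())
        state["total_tokens"] += completion.get("usage", {}).get("total_tokens", 0)
    except Exception:
        # On failure, the prompt is still recorded in the history.
        history.append(prompt_msg.copy())

    total_tokens_used_msg = f"Total tokens used: {state['total_tokens']}"

    # Moved by this commit: the (user, assistant) pair for the chatbot and the
    # return now come after the token message instead of inside the try block.
    chat_messages = [(prompt_msg["content"], completion["content"])]
    return "", chat_messages, total_tokens_used_msg, state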