kouki321 committed · verified
Commit d0abdb3 · Parent(s): b757b40

Update app.py

Files changed (1): app.py (+3 -3)
app.py CHANGED
@@ -204,7 +204,7 @@ if uploaded_file:
     t_start = time()
     full_prompt = f"<|user|>\nQuestion: {query}\n<|assistant|>"
     input_ids = tokenizer(full_prompt, return_tensors="pt").input_ids
-    st.session_state.input_tokens_count += input_ids.shape[-1]
+    input_tokens_count += input_ids.shape[-1]
     t_end = time()
     log.append(f"✍️ Tokenization Time: {t_end - t_start:.2f} s")

@@ -215,8 +215,8 @@ if uploaded_file:
     log.append(f"💡 Generation Time: {last_generation_time:.2f} s")

     generated_tokens_count = output_ids.shape[-1]
-    st.session_state.generated_tokens_count += generated_tokens_count
-    st.session_state.output_tokens_count = generated_tokens_count
+    generated_tokens_count += generated_tokens_count
+    output_tokens_count = generated_tokens_count

     response = tokenizer.decode(output_ids[0], skip_special_tokens=True)
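
The edit drops the `st.session_state.` prefix from the three token counters, turning persistent per-session accumulators into plain locals. Because Streamlit reruns the whole script on every interaction, that changes the semantics: a local is re-created (and reset) on each rerun, while `st.session_state` survives reruns for the life of the browser session. A minimal, self-contained sketch of that difference, assuming the counter names from the diff; the initialization pattern and the `new_tokens` stand-in are illustrative, not part of the commit:

```python
# Illustrative sketch (not from the commit): what dropping the
# st.session_state prefix changes. Streamlit reruns the whole script on
# every interaction, so a plain local is rebuilt each run, while
# st.session_state persists across reruns within one browser session.
import streamlit as st

# Persistent accumulator: initialized once, then survives reruns.
if "input_tokens_count" not in st.session_state:
    st.session_state.input_tokens_count = 0

# Local counter: reset to 0 on every rerun.
input_tokens_count = 0

new_tokens = 7  # stand-in for input_ids.shape[-1] in the diff
st.session_state.input_tokens_count += new_tokens  # running total across turns
input_tokens_count += new_tokens                   # this run's count only

st.write("session total:", st.session_state.input_tokens_count)
st.write("this run:", input_tokens_count)
```

One detail worth flagging on the new side of the diff: `generated_tokens_count += generated_tokens_count` adds the variable to itself immediately after it is assigned, so it doubles the per-turn count. If a running total across turns was intended, it would need a separate accumulator (for example, a hypothetical `total_generated_tokens += generated_tokens_count`).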