teaevo committed
Commit ca38751
1 Parent(s): 23c1edb

Update app.py

Files changed (1): app.py (+6, -2)
app.py CHANGED

@@ -3,6 +3,10 @@ from transformers import AutoModelForCausalLM, AutoTokenizer
 from transformers import TapexTokenizer, BartForConditionalGeneration
 import pandas as pd
 import torch
+
+import numpy as np
+import time
+import os
 #import pkg_resources
 
 '''
@@ -42,8 +46,8 @@ bot_input_ids = None
 
 def chatbot_response(user_message):
 
-    global new_chat
     global chat_history_ids
+    global bot_input_ids
     # Check if the user input is a question
     is_question = "?" in user_message
 
@@ -61,7 +65,7 @@ def chatbot_response(user_message):
     response = chatbot_tokenizer.decode(outputs[0], skip_special_tokens=True)
     '''
     # encode the new user input, add the eos_token and return a tensor in Pytorch
-    new_user_input_ids = chatbot_tokenizer.encode("User: " + user_message + chatbot_tokenizer.eos_token, return_tensors='pt')
+    new_user_input_ids = chatbot_tokenizer.encode(user_message + chatbot_tokenizer.eos_token, return_tensors='pt')
 
     # append the new user input tokens to the chat history
     if chat_history_ids is not None:
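
For context, the edited chatbot_response follows the usual DialoGPT-style multi-turn pattern: encode the user message terminated with eos_token, concatenate it onto the running chat_history_ids, generate, then decode only the newly produced tokens. The sketch below is a minimal, self-contained version of that loop under stated assumptions: the checkpoint name (microsoft/DialoGPT-medium), the generate() settings, and the slice-based decoding are not shown in this diff, which only changes the encode call and the global declarations.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Hypothetical checkpoint; the Space's actual model is not shown in this diff.
chatbot_tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-medium")
chatbot_model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-medium")

chat_history_ids = None

def chatbot_response(user_message):
    global chat_history_ids

    # Encode the new user input and terminate it with the EOS token
    # (matches the changed line in the diff, without the "User: " prefix).
    new_user_input_ids = chatbot_tokenizer.encode(
        user_message + chatbot_tokenizer.eos_token, return_tensors="pt"
    )

    # Append the new user input tokens to the running chat history.
    if chat_history_ids is not None:
        bot_input_ids = torch.cat([chat_history_ids, new_user_input_ids], dim=-1)
    else:
        bot_input_ids = new_user_input_ids

    # Generate a reply and keep the full history for the next turn
    # (generation settings here are assumptions, not taken from app.py).
    chat_history_ids = chatbot_model.generate(
        bot_input_ids,
        max_length=1000,
        pad_token_id=chatbot_tokenizer.eos_token_id,
    )

    # Decode only the tokens generated after the input.
    return chatbot_tokenizer.decode(
        chat_history_ids[:, bot_input_ids.shape[-1]:][0],
        skip_special_tokens=True,
    )

Dropping the "User: " prefix leaves the raw message plus EOS as the model input, which is the plain turn-delimited format this chat-history pattern expects, and declaring bot_input_ids global suggests the concatenated tensor is reused elsewhere in the app.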