teaevo committed
Commit
978fd4d
1 Parent(s): 928fc23

Update app.py

Files changed (1): app.py (+6, -4)
app.py CHANGED
@@ -64,9 +64,9 @@ sql_tokenizer = TapexTokenizer.from_pretrained(sql_model_name)
 sql_model = BartForConditionalGeneration.from_pretrained(sql_model_name)
 
 #sql_response = None
-conversation_history = []
+#conversation_history = []
 
-def predict(input, history=[]):
+def chat(input, history=[]):
 
     #global sql_response
     # Check if the user input is a question
@@ -104,6 +104,7 @@ def sqlquery(input):
     sql_outputs = sql_model.generate(**sql_encoding)
     sql_response = sql_tokenizer.batch_decode(sql_outputs, skip_special_tokens=True)
 
+    '''
     global conversation_history
 
     # Maintain the conversation history
@@ -111,12 +112,13 @@ def sqlquery(input):
     conversation_history.append("Bot: " + " ".join(sql_response) )
 
     output = " ".join(conversation_history)
-
     return output
+    '''
+    return sql_response
 
 
 chat_interface = gr.Interface(
-    fn=predict,
+    fn=chat,
     theme="default",
     css=".footer {display:none !important}",
     inputs=["text", "state"],