Update app.py
app.py CHANGED
@@ -294,9 +294,9 @@ model = MBartForConditionalGeneration.from_pretrained(model_name)
 tokenizer = MBart50Tokenizer.from_pretrained(model_name)
 
 def get_response(input_text):
-    model_inputs = tokenizer(input_text, return_tensors="pt")
-    generated_tokens = model.generate(**model_inputs,forced_bos_token_id=tokenizer.lang_code_to_id["en_XX"])
-    translation= tokenizer.batch_decode(generated_tokens, skip_special_tokens=True)
+    #model_inputs = tokenizer(input_text, return_tensors="pt")
+    #generated_tokens = model.generate(**model_inputs,forced_bos_token_id=tokenizer.lang_code_to_id["en_XX"])
+    #translation= tokenizer.batch_decode(generated_tokens, skip_special_tokens=True)
 
     #string2=" ".join(map(str,translation ))
 
@@ -315,7 +315,7 @@ def get_response(input_text):
     #if sentence== "quit":
     #break
 
-    sentence= tokenize(
+    sentence= tokenize(input_text)
     X = bag_of_words(sentence, all_words)
     X = X.reshape(1, X.shape[0])
     X = torch.from_numpy(X).to(device)
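The three lines commented out in the first hunk are the standard MBart-50 translation recipe from the transformers library: tokenize the input, generate with the beginning-of-sequence token forced to en_XX so the decoder produces English, then batch-decode. When enabled, they would translate input_text to English before the rest of get_response runs. Below is a minimal standalone sketch of that step; the checkpoint name and src_lang are picked purely for illustration, since app.py defines its own model_name earlier in the file.

# Hedged sketch of the translation step this commit disables; the checkpoint
# and src_lang below are illustrative assumptions, not taken from app.py.
from transformers import MBartForConditionalGeneration, MBart50Tokenizer

model_name = "facebook/mbart-large-50-many-to-many-mmt"                      # assumption
model = MBartForConditionalGeneration.from_pretrained(model_name)
tokenizer = MBart50Tokenizer.from_pretrained(model_name, src_lang="hi_IN")   # assumption

def translate_to_english(input_text):
    model_inputs = tokenizer(input_text, return_tensors="pt")
    # forcing the decoder's first token to en_XX makes it emit English
    generated_tokens = model.generate(
        **model_inputs,
        forced_bos_token_id=tokenizer.lang_code_to_id["en_XX"],
    )
    translation = tokenizer.batch_decode(generated_tokens, skip_special_tokens=True)
    return " ".join(map(str, translation))   # mirrors the commented-out string2 join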
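The second hunk shows what replaces that step: get_response now tokenizes input_text directly and feeds it to the bag-of-words intent classifier. The sketch below shows how that path fits together; tokenize and bag_of_words are not shown in this diff, so the versions here follow the common NLTK/PyTorch chatbot pattern and are assumptions, as are the trained model, all_words, and tags that app.py presumably loads elsewhere.

# Hedged reconstruction of the post-commit inference path (not the repo's
# exact code). tokenize/bag_of_words are assumed helpers; model, all_words
# and tags are passed in here but are globals in app.py.
import numpy as np
import torch
import nltk
from nltk.stem.porter import PorterStemmer

stemmer = PorterStemmer()
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

def tokenize(sentence):
    # split the sentence into word tokens
    return nltk.word_tokenize(sentence)

def bag_of_words(tokenized_sentence, all_words):
    # 1.0 at every vocabulary position whose word occurs in the sentence
    stemmed = [stemmer.stem(w.lower()) for w in tokenized_sentence]
    bag = np.zeros(len(all_words), dtype=np.float32)
    for idx, w in enumerate(all_words):
        if w in stemmed:
            bag[idx] = 1.0
    return bag

def get_response(input_text, model, all_words, tags):
    sentence = tokenize(input_text)        # the line changed in this commit
    X = bag_of_words(sentence, all_words)
    X = X.reshape(1, X.shape[0])
    X = torch.from_numpy(X).to(device)

    output = model(X)                      # intent-classifier forward pass
    _, predicted = torch.max(output, dim=1)
    return tags[predicted.item()]          # predicted intent tag (assumed)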