Fouzi Takelait committed
Commit ed3634b
Parent: 20aa2fc

Update app.py

Files changed (1): app.py (+2 -2)
app.py CHANGED

@@ -27,7 +27,7 @@ def translator_fn_baseline(text_in):
     input_ids = source_tokenizer.encode(text_in, return_tensors="pt")
     output_ids = model.generate(
         input_ids,
-        max_length=len(text_in.split())+1,
+        max_length=len(text_in.split())+3,
         bos_token_id=target_tokenizer.bos_token_id,
         eos_token_id=target_tokenizer.eos_token_id,
         pad_token_id=target_tokenizer.pad_token_id,
@@ -43,7 +43,7 @@ def translator_fn_roberta(text_in):
     input_ids_pretrained_roberta = source_tokenizer_pretrained_roberta.encode(text_in, return_tensors="pt")
     output_ids_pretrained_roberta = model_pretrained_roberta.generate(
         input_ids_pretrained_roberta,
-        max_length=len(text_in.split())+1,
+        max_length=len(text_in.split())+3,
         bos_token_id=target_tokenizer_pretrained_roberta.bos_token_id,
         eos_token_id=target_tokenizer_pretrained_roberta.eos_token_id,
         pad_token_id=target_tokenizer_pretrained_roberta.pad_token_id,
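
For context on the change: in Hugging Face transformers, max_length caps generation in tokens, not in source words, and for a seq2seq model the count includes the decoder's start/BOS token, so len(text_in.split())+1 leaves almost no room once a subword tokenizer splits a word into several pieces. A minimal sketch of the effect, assuming a generic public seq2seq checkpoint (Helsinki-NLP/opus-mt-en-fr is illustrative only, not the model loaded in app.py):

    # Sketch: how a word-count-based max_length can truncate generation.
    # Assumes the transformers library; the checkpoint below is a stand-in.
    from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

    tokenizer = AutoTokenizer.from_pretrained("Helsinki-NLP/opus-mt-en-fr")
    model = AutoModelForSeq2SeqLM.from_pretrained("Helsinki-NLP/opus-mt-en-fr")

    text_in = "The quick brown fox jumps over the lazy dog"
    input_ids = tokenizer.encode(text_in, return_tensors="pt")

    # max_length counts decoder tokens (start token included), not words,
    # so word count + 1 is usually too tight for a subword vocabulary.
    tight = model.generate(input_ids, max_length=len(text_in.split()) + 1)
    loose = model.generate(input_ids, max_length=len(text_in.split()) + 3)

    print(tokenizer.decode(tight[0], skip_special_tokens=True))  # often cut off
    print(tokenizer.decode(loose[0], skip_special_tokens=True))  # more headroom

If headroom is the real concern, generate's max_new_tokens argument (which counts only newly generated tokens) may be a more direct knob than a word-count-derived max_length, though the +3 here already relaxes the immediate truncation.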