Vignesh1997 committed
Commit ae560ed
1 Parent(s): 5d70ea5

Update app.py

Files changed (1)
  1. app.py +3 -0
app.py CHANGED
@@ -148,6 +148,9 @@ def find_algnments(
 ):
     source_tokens = bert_tokenizer(source_text, return_tensors="pt")
     target_tokens = bert_tokenizer(translated_text, return_tensors="pt")
+    source_tokens_len = len(bert_tokenizer.tokenize(source_text))
+    target_tokens_len = len(bert_tokenizer.tokenize(translated_text))
+
     bpe_source_map = []
     for i in source_text.split():
         bpe_source_map += len(bert_tokenizer.tokenize(i)) * [i]
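
For context, the loop at the end of this hunk repeats each source word once for every subword piece the tokenizer splits it into, so alignment indices computed over BPE tokens can later be mapped back to whole words. Below is a minimal standalone sketch of that behaviour; the bert-base-multilingual-cased checkpoint is only an illustrative assumption, since app.py's actual tokenizer is not shown in this diff.

from transformers import AutoTokenizer

# Assumed checkpoint for illustration; any BERT-style tokenizer behaves the same way here.
bert_tokenizer = AutoTokenizer.from_pretrained("bert-base-multilingual-cased")

source_text = "unbelievable results"

# Mirror the diff: repeat each word once per subword piece it produces.
bpe_source_map = []
for word in source_text.split():
    bpe_source_map += len(bert_tokenizer.tokenize(word)) * [word]

# The subword sequence and the map have the same length, so every
# subword position points back to the whole word it came from.
print(bert_tokenizer.tokenize(source_text))
print(bpe_source_map)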