vishwask committed on
Commit
ee3000a
·
verified ·
1 Parent(s): 32e5738

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -5
app.py CHANGED
@@ -28,20 +28,19 @@ languages_list = [("Gujarati", "gu_IN"), ('Hindi',"hi_IN") , ("Bengali","bn_IN")
28
  ("Marathi","mr_IN"), ("Tamil","ta_IN"), ("Telugu","te_IN")]
29
 
30
 
 
31
def intitalize_lang(language):
    """Identity helper: hand back the supplied mBART language code unchanged.

    (The misspelled name "intitalize" is kept — callers depend on it.)
    """
    chosen_code = language
    return chosen_code
33
 
34
def english_to_indian(sentence, language="hi_IN"):
    """Translate an English *sentence* into the target Indian language.

    Args:
        sentence: English input text.
        language: mBART-50 target language code (e.g. "hi_IN", "ta_IN").
            Added with a default so existing single-argument callers keep
            working — the original called intitalize_lang() with NO argument,
            which raises TypeError (it requires one positional parameter).

    Returns:
        List of decoded translation strings from batch_decode.
    """
    lang = intitalize_lang(language)
    # mBART-50 language codes are case-sensitive: "en_XX", not "en_xx".
    translation_tokenizer.src_lang = "en_XX"
    encoded_hi = translation_tokenizer(sentence, return_tensors="pt")
    # forced_bos_token_id steers generation toward the chosen target language.
    generated_tokens = translation_model.generate(
        **encoded_hi,
        forced_bos_token_id=translation_tokenizer.lang_code_to_id[lang],
    )
    return translation_tokenizer.batch_decode(generated_tokens, skip_special_tokens=True)
40
 
41
 
42
def indian_to_english(sentence, language="hi_IN"):
    """Translate *sentence*, written in an Indian language, into English.

    Args:
        sentence: Input text in the source Indian language.
        language: mBART-50 source language code (e.g. "hi_IN", "bn_IN").
            Added with a default for backward compatibility — the original
            called intitalize_lang() with NO argument, which raises
            TypeError (it requires one positional parameter).

    Returns:
        List of decoded English translation strings from batch_decode.
    """
    lang = intitalize_lang(language)
    translation_tokenizer.src_lang = lang
    encoded_hi = translation_tokenizer(sentence, return_tensors="pt")
    # Force the decoder to start in English ("en_XX").
    generated_tokens = translation_model.generate(
        **encoded_hi,
        forced_bos_token_id=translation_tokenizer.lang_code_to_id["en_XX"],
    )
    return translation_tokenizer.batch_decode(generated_tokens, skip_special_tokens=True)
 
28
  ("Marathi","mr_IN"), ("Tamil","ta_IN"), ("Telugu","te_IN")]
29
 
30
 
31
# Currently-selected target/source language code (mBART-50 style, e.g. "hi_IN").
lang_global = ''

def intitalize_lang(language):
    """Record the chosen language code in the module-level lang_global.

    BUG FIX: the original body was `lang_global = language` with no
    `global` statement, so the assignment created a function-local name
    and the module-level lang_global stayed '' — later
    lang_code_to_id[lang_global] lookups would fail.
    (The misspelled name "intitalize" is kept for caller compatibility.)
    """
    global lang_global
    lang_global = language
34
 
35
def english_to_indian(sentence):
    """Translate an English *sentence* into the language held in lang_global.

    Requires intitalize_lang(...) to have been called first so that
    lang_global holds a valid mBART-50 code.

    Returns:
        List of decoded translation strings from batch_decode.
    """
    # BUG FIX: mBART-50 language codes are case-sensitive — the source
    # language must be "en_XX"; the original "en_xx" is not a valid code.
    translation_tokenizer.src_lang = "en_XX"
    encoded_hi = translation_tokenizer(sentence, return_tensors="pt")
    # forced_bos_token_id steers generation toward the selected target language.
    generated_tokens = translation_model.generate(
        **encoded_hi,
        forced_bos_token_id=translation_tokenizer.lang_code_to_id[lang_global],
    )
    return translation_tokenizer.batch_decode(generated_tokens, skip_special_tokens=True)
40
 
41
 
42
def indian_to_english(sentence):
    """Translate *sentence*, in the currently selected Indian language, to English.

    The source language is taken from the module-level lang_global, which
    must have been set beforehand via intitalize_lang.
    """
    # Tell the tokenizer which source language the input text is in.
    translation_tokenizer.src_lang = lang_global
    model_inputs = translation_tokenizer(sentence, return_tensors="pt")
    # Generation is forced to begin with the English language token.
    english_bos_id = translation_tokenizer.lang_code_to_id["en_XX"]
    output_ids = translation_model.generate(**model_inputs, forced_bos_token_id=english_bos_id)
    decoded = translation_tokenizer.batch_decode(output_ids, skip_special_tokens=True)
    return decoded