tareesh committed on
Commit
1f4bec9
1 Parent(s): b34faf5

Update app.py

Files changed (1)
  1. app.py +8 -8
app.py CHANGED
@@ -14,29 +14,29 @@ text = st.text_area('Enter the text:')
 
 if text:
     model_inputs = tokenizer_translation(text, return_tensors="pt")
-    lg = st.number_input("Select Language: 1.Hindi, 2.Telugu, 3.Gujarati, 4.Bengali")
-    if lg==1:
+    lg = st.text_input("Select Language: hi.Hindi, te.Telugu, gu.Gujarati, bn.Bengali")
+    if lg=='hi':
         generated_tokens = model_translation.generate(
             **model_inputs,
             forced_bos_token_id=tokenizer_translation.lang_code_to_id["hi_IN"]
         )
         translation = tokenizer_translation.batch_decode(generated_tokens, skip_special_tokens=True)
-        st.json(translation)
-    elif lg==2:
+        st.write(translation)
+    elif lg=='te':
         generated_tokens = model_translation.generate(
             **model_inputs,
             forced_bos_token_id=tokenizer_translation.lang_code_to_id["te_IN"]
         )
         translation = tokenizer_translation.batch_decode(generated_tokens, skip_special_tokens=True)
-        st.json(translation)
-    elif lg==3:
+        st.write(translation)
+    elif lg=='gu':
         generated_tokens = model_translation.generate(
             **model_inputs,
             forced_bos_token_id=tokenizer_translation.lang_code_to_id["gu_IN"]
         )
         translation = tokenizer_translation.batch_decode(generated_tokens, skip_special_tokens=True)
-        st.json(translation)
-    elif lg==4:
+        st.write(translation)
+    elif lg=='bn':
         generated_tokens = model_translation.generate(
             **model_inputs,
             forced_bos_token_id=tokenizer_translation.lang_code_to_id["bn_IN"]
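
For reference, a minimal sketch of how the four repeated if/elif branches in this hunk could be collapsed into a single lookup. It is not part of the commit: it assumes the tokenizer_translation, model_translation, st and text names defined earlier in app.py, and LANG_IDS is a hypothetical helper name introduced here for illustration.

    # Sketch only: map the user's short code to the mBART language id,
    # then run one shared generate/decode path instead of four branches.
    LANG_IDS = {"hi": "hi_IN", "te": "te_IN", "gu": "gu_IN", "bn": "bn_IN"}

    lg = st.text_input("Select Language: hi.Hindi, te.Telugu, gu.Gujarati, bn.Bengali")
    if lg in LANG_IDS:
        model_inputs = tokenizer_translation(text, return_tensors="pt")
        generated_tokens = model_translation.generate(
            **model_inputs,
            forced_bos_token_id=tokenizer_translation.lang_code_to_id[LANG_IDS[lg]],
        )
        translation = tokenizer_translation.batch_decode(
            generated_tokens, skip_special_tokens=True
        )
        st.write(translation)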