demo-app1 / app.py
import streamlit as st
from transformers import MBartForConditionalGeneration, MBart50TokenizerFast
import sentencepiece  # required at runtime by the MBart50 tokenizer

text = st.text_area('Enter the text:')

# English-to-many multilingual translation model, with English ("en_XX") as the source language.
model = MBartForConditionalGeneration.from_pretrained("facebook/mbart-large-50-one-to-many-mmt")
tokenizer = MBart50TokenizerFast.from_pretrained("facebook/mbart-large-50-one-to-many-mmt", src_lang="en_XX")
if text:
    model_inputs = tokenizer(text, return_tensors="pt")
    # Force the decoder to start with the Hindi language token so the output is Hindi.
    generated_tokens = model.generate(
        **model_inputs,
        forced_bos_token_id=tokenizer.lang_code_to_id["hi_IN"]
    )
    translation = tokenizer.batch_decode(generated_tokens, skip_special_tokens=True)
    st.json(translation)
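
Because Streamlit reruns the whole script on every widget interaction, the script above reloads the large mBART checkpoint each time the user edits the text box. A minimal sketch of a cached loader, assuming a Streamlit version that provides st.cache_resource (the function name load_translation_model is illustrative, not from the original app):

import streamlit as st
from transformers import MBartForConditionalGeneration, MBart50TokenizerFast

@st.cache_resource
def load_translation_model():
    # Cached across Streamlit reruns, so the checkpoint is loaded only once per process.
    model = MBartForConditionalGeneration.from_pretrained("facebook/mbart-large-50-one-to-many-mmt")
    tokenizer = MBart50TokenizerFast.from_pretrained("facebook/mbart-large-50-one-to-many-mmt", src_lang="en_XX")
    return model, tokenizer

model, tokenizer = load_translation_model()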