How to run the model
```python
from transformers import M2M100ForConditionalGeneration, M2M100Tokenizer

# Load the fine-tuned Vietnamese-to-Bahnar checkpoint and its tokenizer
model = M2M100ForConditionalGeneration.from_pretrained("transZ/M2M_Vi_Ba")
tokenizer = M2M100Tokenizer.from_pretrained("transZ/M2M_Vi_Ba")

# Set the source language to Vietnamese
tokenizer.src_lang = "vi"
vi_text = "Hôm nay ba đi chợ."

# Encode the input and force the decoder to start with the Bahnar language token
encoded_vi = tokenizer(vi_text, return_tensors="pt")
generated_tokens = model.generate(**encoded_vi, forced_bos_token_id=tokenizer.get_lang_id("ba"))

# Decode the generated tokens back into text
translation = tokenizer.batch_decode(generated_tokens, skip_special_tokens=True)[0]
print(translation)
```
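
To translate several sentences at once, the tokenizer also accepts a list of strings. The sketch below is a minimal batched variant, assuming the `model`, `tokenizer`, and the `"vi"`/`"ba"` language codes from the snippet above; the second input sentence is only an illustrative example.

```python
# Minimal batched sketch; assumes `model` and `tokenizer` are already loaded as above
vi_sentences = [
    "Hôm nay ba đi chợ.",
    "Tôi thích đọc sách.",  # illustrative extra sentence
]

# Pad the batch so all inputs share the same length
batch = tokenizer(vi_sentences, return_tensors="pt", padding=True)
outputs = model.generate(**batch, forced_bos_token_id=tokenizer.get_lang_id("ba"))

for src, hyp in zip(vi_sentences, tokenizer.batch_decode(outputs, skip_special_tokens=True)):
    print(f"{src} -> {hyp}")
```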