# Russian-to-English translation with Facebook's WMT19 FSMT model.
from transformers import FSMTForConditionalGeneration, FSMTTokenizer

mname = "facebook/wmt19-ru-en"
tokenizer = FSMTTokenizer.from_pretrained(mname)
model = FSMTForConditionalGeneration.from_pretrained(mname)


def translate_ru_en(text):
    # Tokenize the Russian input, generate a translation, and decode it back to text.
    input_ids = tokenizer.encode(text, return_tensors="pt")
    outputs = model.generate(input_ids)
    decoded = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return decoded
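

# Minimal usage sketch (assumed entry point, not part of the original script);
# the exact output wording may vary across transformers versions.
if __name__ == "__main__":
    print(translate_ru_en("Машинное обучение - это здорово!"))
    # Expected output along the lines of: "Machine learning is great!"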