# Hugging Face Spaces page residue (Space status: Sleeping) — kept as a comment
# so the file remains valid Python.
# Spaces: Sleeping / Sleeping
"""Translate an English sentence to French with Facebook's M2M100 model.

Loads the 418M-parameter M2M100 checkpoint (tokenizer + seq2seq model)
and generates a French translation by forcing the French language token
as the first decoded token, per M2M100's multilingual decoding convention.
"""
from transformers import AutoTokenizer, M2M100ForConditionalGeneration, pipeline

# High-level pipeline alternative using the distilled NLLB-200 checkpoint.
# NOTE(review): `pipe` is never used below — it downloads a second large
# model for nothing; consider deleting this line if only M2M100 is needed.
pipe = pipeline("translation", model="facebook/nllb-200-distilled-600M")

model = M2M100ForConditionalGeneration.from_pretrained("facebook/m2m100_418M")
tokenizer = AutoTokenizer.from_pretrained("facebook/m2m100_418M")

text_to_translate = "Life is like a box of chocolates"
model_inputs = tokenizer(text_to_translate, return_tensors="pt")

# Translate to French: forced_bos_token_id pins the target language token
# ("fr") as the first generated token, which M2M100 requires to select
# the output language.
gen_tokens = model.generate(
    **model_inputs,
    forced_bos_token_id=tokenizer.get_lang_id("fr"),
)
print(tokenizer.batch_decode(gen_tokens, skip_special_tokens=True))