abkhaz / tokenizer_config.json
{
"additional_special_tokens": null,
"eos_token": "</s>",
"extra_ids": 0,
"model_max_length": 1000000000000000019884624838656,
"name_or_path": "AutoTrain",
"pad_token": "<pad>",
"sp_model_kwargs": {},
"special_tokens_map_file": "null"
"tokenizer_class": "T5Tokenizer",
"unk_token": "<unk>"
}
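This file is the configuration that T5Tokenizer (the tokenizer_class named above) reads when the repository is loaded with from_pretrained. A minimal sketch of loading it, assuming the repository files have been downloaded locally (the "./abkhaz" path is illustrative):

from transformers import T5Tokenizer

# Load the tokenizer from a local copy of the repository; "./abkhaz" is an assumed path.
tokenizer = T5Tokenizer.from_pretrained("./abkhaz")
print(tokenizer.eos_token, tokenizer.pad_token, tokenizer.unk_token)  # </s> <pad> <unk>

The same tokenizer class appears in the mT5 loss-computation example below.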
from transformers import MT5ForConditionalGeneration, T5Tokenizer

# Load the pretrained mT5 checkpoint and its tokenizer.
model = MT5ForConditionalGeneration.from_pretrained("google/mt5-small")
tokenizer = T5Tokenizer.from_pretrained("google/mt5-small")

# German article/summary pair ("A UN official says negotiations in Syria must continue." / "Further negotiations in Syria.").
article = "UN Offizier sagt, dass weiter verhandelt werden muss in Syrien."
summary = "Weiter Verhandlung in Syrien."

# Tokenize the input and the target, then compute the sequence-to-sequence training loss.
inputs = tokenizer(article, text_target=summary, return_tensors="pt")
outputs = model(**inputs)
loss = outputs.loss
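The returned loss is an ordinary PyTorch scalar tensor and can drive a fine-tuning step. A minimal single-step sketch, assuming a standard AdamW optimizer (the learning rate is illustrative, not from the original example):

import torch

# Illustrative optimizer and learning rate; these are assumptions for the sketch.
optimizer = torch.optim.AdamW(model.parameters(), lr=1e-4)
loss.backward()        # backpropagate the sequence-to-sequence loss
optimizer.step()       # update the mT5 parameters
optimizer.zero_grad()  # clear gradients before the next batch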