Summarization / tokenizer_config.json
{
"model_max_length": 1000000000000000019884624838656,
"name_or_path": "gogamza/kobart-summarization",
"special_tokens_map_file": "/opt/ml/.cache/huggingface/hub/models--gogamza--kobart-summarization/snapshots/8a63d6913edc0e16a902e3fa8b688a134f0dd776/special_tokens_map.json",
"tokenizer_class": "PreTrainedTokenizerFast",
"use_fast": true
}
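For context: this config tells `transformers` to load a `PreTrainedTokenizerFast` for `gogamza/kobart-summarization`, and the huge `model_max_length` is the library's "no explicit limit" sentinel (`int(1e30)` after float rounding). Below is a minimal sketch of how this file is consumed when loading the tokenizer and generating a summary; it assumes `transformers` and `torch` are installed and that the model weights live in the same repo, and the input text and generation parameters are illustrative, not taken from this file.

```python
# Minimal sketch: load the tokenizer described by this config and run a
# summarization pass. Generation parameters are illustrative assumptions.
from transformers import AutoTokenizer, BartForConditionalGeneration

tokenizer = AutoTokenizer.from_pretrained("gogamza/kobart-summarization")
model = BartForConditionalGeneration.from_pretrained("gogamza/kobart-summarization")

text = "..."  # placeholder: Korean article text to summarize
inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=1024)
summary_ids = model.generate(inputs["input_ids"], max_length=128, num_beams=4)
print(tokenizer.decode(summary_ids[0], skip_special_tokens=True))
```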