low-resource-language-model-adaptation/adapted_tokenizers/replaced-opt-hau/opt_500-replace_full-hau-opt/tokenizer_config.json
{
  "add_bos_token": true,
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "1": {
      "content": "<pad>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "2": {
      "content": "</s>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "bos_token": "</s>",
  "clean_up_tokenization_spaces": true,
  "eos_token": "</s>",
  "errors": "replace",
  "model_max_length": 1000000000000000019884624838656,
  "pad_token": "<pad>",
  "tokenizer_class": "GPT2Tokenizer",
  "unk_token": "</s>"
}
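
A minimal sketch of loading this config with Hugging Face Transformers. The local directory path mirrors the repo layout above, and the sample Hausa sentence is illustrative; both are assumptions, not part of the config itself.

from transformers import AutoTokenizer

# "tokenizer_class": "GPT2Tokenizer" makes AutoTokenizer instantiate the
# GPT-2 byte-level BPE tokenizer from the files in this directory
# (assumed local path mirroring the repo layout above).
tokenizer = AutoTokenizer.from_pretrained(
    "adapted_tokenizers/replaced-opt-hau/opt_500-replace_full-hau-opt"
)

# With "add_bos_token": true and "bos_token": "</s>" (id 2 in
# added_tokens_decoder), encoding prepends the </s> id, matching
# OPT's convention of reusing </s> as bos/eos/unk.
ids = tokenizer("Sannu da zuwa").input_ids
print(ids[0] == tokenizer.bos_token_id)  # True
print(tokenizer.convert_ids_to_tokens(ids))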