vishnukk33 committed
Commit
c5bb6eb
1 Parent(s): 19768fc

Delete tokenizer_config.json

Files changed (1)
  1. tokenizer_config.json +0 -1
tokenizer_config.json DELETED
@@ -1 +0,0 @@
- {"errors": "replace", "bos_token": {"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "eos_token": {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "unk_token": {"content": "<unk>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "sep_token": {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "cls_token": {"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "pad_token": {"content": "<pad>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "add_prefix_space": true, "added_tokens_decoder": {"0": {"content": "<mask>", "lstrip": true, "normalized": false, "rstrip": false, "single_word": false, "special": true}, "1": {"content": "<pad>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true}, "2": {"content": "<unk>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true}, "3": {"content": "<s>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true}, "4": {"content": "</s>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true}}, "clean_up_tokenization_spaces": true, "max_length": 128, "model_max_length": 1000000000000000019884624838656, "pad_to_multiple_of": null, "pad_token_type_id": 0, "padding_side": "right", "stride": 0, "trim_offsets": true, "truncation_side": "right", "truncation_strategy": "longest_first", "special_tokens_map_file": "cache_dir/8ea647fe2507dd9424cae28e30a169caf024d03e72369935c0ce9cf791bd2e6d.50c9a6a3342271e7e900bb03520d7f844b78e2b2ef8352a0239b688c7d12bdc6", "name_or_path": "Raj-Sanjay-Shah/baby_berta_duplicate", "tokenizer_class": "RobertaTokenizer"}