Update tokenizer_config.json
Browse files

set task "entity_pair_classification"
- tokenizer_config.json +1 -1
tokenizer_config.json
CHANGED
@@ -1 +1 @@
|
|
1 |
-
{"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "cls_token": "<s>", "pad_token": "<pad>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "sp_model_kwargs": {}, "task":
|
|
|
1 |
+
{"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "cls_token": "<s>", "pad_token": "<pad>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "sp_model_kwargs": {}, "task": "entity_pair_classification", "max_entity_length": 32, "max_mention_length": 30, "entity_token_1": {"content": "<ent>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "entity_token_2": {"content": "<ent2>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "entity_unk_token": "[UNK]", "entity_pad_token": "[PAD]", "entity_mask_token": "[MASK]", "entity_mask2_token": "[MASK2]", "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "studio-ousia/mluke-large-lite", "additional_special_tokens": [{"content": "<ent>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, {"content": "<ent2>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, {"content": "<ent>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, {"content": "<ent2>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}], "tokenizer_file": null, "tokenizer_class": "MLukeTokenizer"}
|