LysandreJik committed
Commit: 36fdf23
1 Parent(s): a48b6fe
config.json CHANGED
@@ -17,8 +17,9 @@
   "num_hash_functions": 8,
   "num_hidden_layers": 5,
   "pad_token_id": 0,
-  "transformers_version": "4.10.0.dev0",
+  "transformers_version": "4.11.0.dev0",
   "type_vocab_size": 16,
   "upsampling_kernel_size": 4,
-  "use_cache": true
+  "use_cache": true,
+  "vocab_size": 1114112
 }
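The added "vocab_size" of 1114112 equals 0x110000, the full Unicode code-point range, which fits CANINE's character-level design where every input ID is a raw code point. A minimal sketch to confirm the updated values, assuming transformers is installed and the google/canine-s checkpoint is reachable on the Hub:

from transformers import AutoConfig

# Minimal sketch: inspect the config fields changed in this commit.
# Assumes network access to the google/canine-s checkpoint.
config = AutoConfig.from_pretrained("google/canine-s")

print(config.vocab_size)  # 1114112 == 0x110000, the full Unicode code-point range
print(config.use_cache)   # True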
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:2a2f045238fbcbfc3a026ec35bfe5525915db33313d02f7e03237d295b2dfc0f
-size 4488171
+oid sha256:f498bfec03188b66d8294924e94112e1db38ffaa23ac63bc88a2d789299f8789
+size 4488299
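The Git LFS pointer now references a new blob. A quick sketch to check that a locally downloaded copy of the weights matches the new oid and size; the local filename is an assumption for illustration:

import hashlib
from pathlib import Path

# Minimal sketch: verify a downloaded pytorch_model.bin against the new LFS pointer.
path = Path("pytorch_model.bin")
digest = hashlib.sha256(path.read_bytes()).hexdigest()

print(digest == "f498bfec03188b66d8294924e94112e1db38ffaa23ac63bc88a2d789299f8789")
print(path.stat().st_size == 4488299)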
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+{"bos_token": {"content": "", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "eos_token": {"content": "", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "sep_token": {"content": "", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "pad_token": {"content": "\u0000", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "cls_token": {"content": "", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "mask_token": {"content": "", "single_word": false, "lstrip": true, "rstrip": false, "normalized": true}}
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+{"bos_token": {"content": "", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "eos_token": {"content": "", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "sep_token": {"content": "", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "cls_token": {"content": "", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "pad_token": {"content": "\u0000", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "mask_token": {"content": "", "single_word": false, "lstrip": true, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "add_prefix_space": false, "model_max_length": 2048, "special_tokens_map_file": "/home/lysandre/.cache/huggingface/transformers/00ffef2919c64d09945b6c06b1c03c4d18bf55f88013390d4443a1eb8c4f8a10.ab71f530366fe02e2834427e7b90198bfd0d573bc4279bfafdb2b95fe2b46dde", "tokenizer_file": null, "name_or_path": "google/canine-s", "tokenizer_class": "CanineTokenizer"}