LysandreJik committed
Commit 09ebe8d · 1 Parent(s): f9b14c3
config.json CHANGED
@@ -14,9 +14,10 @@
   ],
   "axial_pos_shape": [
     4,
-    8
+    25
   ],
   "chunk_size_lm_head": 0,
+  "classifier_dropout": null,
   "eos_token_id": 2,
   "feed_forward_size": 32,
   "hash_seed": 0,
@@ -24,7 +25,6 @@
   "hidden_dropout_prob": 0.1,
   "hidden_size": 32,
   "initializer_range": 0.02,
-  "is_decoder": true,
   "layer_norm_eps": 1e-12,
   "local_attention_probs_dropout_prob": 0.1,
   "local_attn_chunk_length": 4,
@@ -42,7 +42,7 @@
   "num_hidden_layers": 4,
   "pad_token_id": 0,
   "tie_word_embeddings": false,
-  "transformers_version": "4.10.0.dev0",
+  "transformers_version": "4.11.0.dev0",
   "use_cache": true,
-  "vocab_size": 100
+  "vocab_size": 1000
 }
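For context on the axial_pos_shape change: in the transformers Reformer implementation, the product of the axial_pos_shape axes must equal the sequence length the inputs are padded to, so [4, 8] covered 32 positions while the new [4, 25] covers 100. A minimal sketch (the config value is copied from this commit; constructing the config locally avoids assuming a repo id):

from transformers import ReformerConfig

# Value copied from the new config.json in this commit.
config = ReformerConfig(axial_pos_shape=[4, 25])

# Axial position embeddings factor the position matrix over these axes;
# their product is the number of positions the embeddings can represent.
num_positions = config.axial_pos_shape[0] * config.axial_pos_shape[1]
print(num_positions)  # 100 (the previous [4, 8] shape covered only 32)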
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c8be031ccbff1d52a943941b4f4fe311efcc1cbfaad880c41d4fd220b8d17a1e
-size 368150
+oid sha256:2f7979bce783729044177dca00617f266365f397861c607be854ec2f41c92235
+size 718550
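The weights themselves live in Git LFS, so the diff above only touches the pointer file (spec version, sha256 of the content, byte size). A hypothetical check of a downloaded copy against the new pointer, assuming the file sits in the current directory:

import hashlib
from pathlib import Path

# Hypothetical local path to the downloaded weights file.
weights = Path("pytorch_model.bin")

# Both expected values come from the new LFS pointer in this commit.
digest = hashlib.sha256(weights.read_bytes()).hexdigest()
assert digest == "2f7979bce783729044177dca00617f266365f397861c607be854ec2f41c92235"
assert weights.stat().st_size == 718550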
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+{"eos_token": "</s>", "unk_token": "<unk>"}
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+{"eos_token": "</s>", "unk_token": "<unk>", "additional_special_tokens": [], "model_max_length": 524288, "special_tokens_map_file": null, "name_or_path": "google/reformer-crime-and-punishment", "tokenizer_class": "ReformerTokenizer"}