patrickvonplaten committed on
Commit
d958377
1 Parent(s): b8089eb

correct checkpoints see: https://github.com/facebookresearch/metaseq/pull/164

Browse files
config.json CHANGED
@@ -1,15 +1,17 @@
1
  {
 
2
  "activation_dropout": 0.0,
3
  "activation_function": "relu",
4
  "architectures": [
5
- "OPTForCausalLM"
6
  ],
7
  "attention_dropout": 0.0,
8
  "bos_token_id": 2,
9
- "hidden_size": 4096,
10
  "dropout": 0.1,
11
  "eos_token_id": 2,
12
  "ffn_dim": 16384,
 
13
  "init_std": 0.02,
14
  "layerdrop": 0.0,
15
  "max_position_embeddings": 2048,
@@ -17,10 +19,10 @@
17
  "num_attention_heads": 32,
18
  "num_hidden_layers": 32,
19
  "pad_token_id": 1,
 
20
  "torch_dtype": "float16",
21
- "transformers_version": "4.19.0.dev0",
22
  "use_cache": true,
23
  "vocab_size": 50272,
24
- "word_embed_proj_dim": 4096,
25
- "prefix": "</s>"
26
  }
1
  {
2
+ "_remove_final_layer_norm": false,
3
  "activation_dropout": 0.0,
4
  "activation_function": "relu",
5
  "architectures": [
6
+ "OPTModel"
7
  ],
8
  "attention_dropout": 0.0,
9
  "bos_token_id": 2,
10
+ "do_layer_norm_before": true,
11
  "dropout": 0.1,
12
  "eos_token_id": 2,
13
  "ffn_dim": 16384,
14
+ "hidden_size": 4096,
15
  "init_std": 0.02,
16
  "layerdrop": 0.0,
17
  "max_position_embeddings": 2048,
19
  "num_attention_heads": 32,
20
  "num_hidden_layers": 32,
21
  "pad_token_id": 1,
22
+ "prefix": "</s>",
23
  "torch_dtype": "float16",
24
+ "transformers_version": "4.21.0.dev0",
25
  "use_cache": true,
26
  "vocab_size": 50272,
27
+ "word_embed_proj_dim": 4096
 
28
  }
pytorch_model-00001-of-00002.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:3417d01bca959bd136493f0b06413b8e85fd8f9c5101f62a55663c3ba74eb234
3
- size 9960733937
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9fe9acfd39b64d2f51371afac60af02289eea8451c5e28ede2f67fec5505f50c
3
+ size 9960750957
pytorch_model.bin.index.json CHANGED
@@ -1,10 +1,12 @@
1
  {
2
  "metadata": {
3
- "total_size": 13316931584
4
  },
5
  "weight_map": {
6
  "decoder.embed_positions.weight": "pytorch_model-00001-of-00002.bin",
7
  "decoder.embed_tokens.weight": "pytorch_model-00001-of-00002.bin",
 
 
8
  "decoder.layers.0.fc1.bias": "pytorch_model-00001-of-00002.bin",
9
  "decoder.layers.0.fc1.weight": "pytorch_model-00001-of-00002.bin",
10
  "decoder.layers.0.fc2.bias": "pytorch_model-00001-of-00002.bin",
1
  {
2
  "metadata": {
3
+ "total_size": 13316947968
4
  },
5
  "weight_map": {
6
  "decoder.embed_positions.weight": "pytorch_model-00001-of-00002.bin",
7
  "decoder.embed_tokens.weight": "pytorch_model-00001-of-00002.bin",
8
+ "decoder.final_layer_norm.bias": "pytorch_model-00001-of-00002.bin",
9
+ "decoder.final_layer_norm.weight": "pytorch_model-00001-of-00002.bin",
10
  "decoder.layers.0.fc1.bias": "pytorch_model-00001-of-00002.bin",
11
  "decoder.layers.0.fc1.weight": "pytorch_model-00001-of-00002.bin",
12
  "decoder.layers.0.fc2.bias": "pytorch_model-00001-of-00002.bin",