mkardas committed on
Commit: 44195ac
1 Parent(s): 6055184

Convert checkpoint files to float16 (#6)


- Convert checkpoint files to float16 (241c2933133be42da3bfbf11388af23a629f7b7c)
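
The conversion script itself is not part of this commit. A minimal sketch of how the cast could be done with PyTorch, assuming a standard state dict in pytorch_model.bin and writing the new dtype back into config.json:

```python
# Minimal sketch, not the script used for this commit: cast all floating-point
# tensors in the checkpoint to float16 and record the new dtype in config.json.
import json

import torch

state_dict = torch.load("pytorch_model.bin", map_location="cpu")
state_dict = {
    name: tensor.half() if tensor.is_floating_point() else tensor
    for name, tensor in state_dict.items()
}
torch.save(state_dict, "pytorch_model.bin")

with open("config.json") as f:
    config = json.load(f)
config["torch_dtype"] = "float16"
with open("config.json", "w") as f:
    json.dump(config, f, indent=2)
```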

Files changed (2):
  1. config.json +4 -4
  2. pytorch_model.bin +2 -2
config.json CHANGED
@@ -7,7 +7,7 @@
     "OPTForCausalLM"
   ],
   "attention_dropout": 0.1,
-  "bias": false,
+  "enable_bias": true,
   "bos_token_id": 0,
   "do_layer_norm_before": true,
   "dropout": 0.1,
@@ -15,7 +15,7 @@
   "ffn_dim": 8192,
   "hidden_size": 2048,
   "init_std": 0.02,
-  "layer_norm_elementwise_affine": false,
+  "layer_norm_elementwise_affine": true,
   "layerdrop": 0.0,
   "learned_embeddings": true,
   "max_position_embeddings": 2048,
@@ -24,8 +24,8 @@
   "num_hidden_layers": 24,
   "pad_token_id": 1,
   "scale_embeddings": false,
-  "torch_dtype": "float32",
-  "transformers_version": "4.25.0.dev0",
+  "torch_dtype": "float16",
+  "transformers_version": "4.21.0.dev0",
   "use_cache": true,
   "vocab_size": 50000,
   "word_embed_proj_dim": 2048
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:97c287bad7929491e81636940b1e2f890e3886d74427e3254ca6fb628aadebce
- size 5260928593
+ oid sha256:f349eb24e0a15d358137908aa71f88afc580250b3acebef102ab6e8b8cc06f18
+ size 2630528157
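
The new LFS pointer shows the weight file shrinking from about 5.26 GB to about 2.63 GB, roughly half, as expected when every float32 tensor becomes float16. A hedged sketch for verifying a local copy of the converted file:

```python
# Hypothetical verification against a local copy of pytorch_model.bin:
# every floating-point tensor should now be float16, and the tensor payload
# should be roughly half the original float32 size.
import torch

state_dict = torch.load("pytorch_model.bin", map_location="cpu")
assert all(
    t.dtype == torch.float16 for t in state_dict.values() if t.is_floating_point()
)
n_bytes = sum(t.numel() * t.element_size() for t in state_dict.values())
print(f"tensor payload: {n_bytes / 1e9:.2f} GB")
```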