valhalla committed
Commit 763a667
1 Parent(s): 146b5f5

update ckpt

Files changed (2)
  1. config.json +5 -0
  2. pytorch_model.bin +1 -1
config.json CHANGED
@@ -1,10 +1,12 @@
 {
+  "_name_or_path": "xglm-564M/",
   "activation_dropout": 0.0,
   "activation_function": "gelu",
   "architectures": [
     "XGLMForCausalLM"
   ],
   "attention_dropout": 0.1,
+  "attention_heads": 16,
   "bos_token_id": 0,
   "classifier_dropout": 0.0,
   "d_model": 1024,
@@ -16,9 +18,12 @@
   "dropout": 0.1,
   "encoder_layerdrop": 0.0,
   "eos_token_id": 2,
+  "ffn_dim": 4096,
   "init_std": 0.02,
+  "layerdrop": 0.0,
   "max_position_embeddings": 2048,
   "model_type": "xglm",
+  "num_layers": 24,
   "pad_token_id": 1,
   "scale_embedding": true,
   "torch_dtype": "float32",
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:9d5716c236039c09e444157177c11051f84f36a615105a04cb93fadf66faacbd
+oid sha256:33de0db4f71eed508d93035d14a6fd4d5b61c4bdfbb72f6c3d01f2ffdc7bff7c
 size 2266383929
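
pytorch_model.bin is stored via Git LFS, so the diff above only changes the pointer file: the oid is the SHA-256 of the full checkpoint and size is its byte count (unchanged here). A minimal sketch for verifying a locally downloaded copy against the new pointer; the local filename "pytorch_model.bin" is assumed to be the downloaded checkpoint, not the pointer file.

# Minimal sketch: check a downloaded checkpoint against the updated LFS pointer.
import hashlib

EXPECTED_OID = "33de0db4f71eed508d93035d14a6fd4d5b61c4bdfbb72f6c3d01f2ffdc7bff7c"
EXPECTED_SIZE = 2266383929

h = hashlib.sha256()
size = 0
with open("pytorch_model.bin", "rb") as f:  # assumed local path of the downloaded checkpoint
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
        size += len(chunk)

print(h.hexdigest() == EXPECTED_OID)  # True for the checkpoint in this commit
print(size == EXPECTED_SIZE)          # True
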