robinzixuan committed on
Commit
ae5ccde
1 Parent(s): c052bbf

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +9 -2
config.json CHANGED
@@ -6,7 +6,8 @@
6
  ],
7
  "attention_dropout": 0.0,
8
  "bos_token_id": 2,
9
- "do_layer_norm_before": true,
 
10
  "dropout": 0.1,
11
  "enable_bias": true,
12
  "eos_token_id": 2,
@@ -24,5 +25,11 @@
24
  "transformers_version": "4.31.0",
25
  "use_cache": true,
26
  "vocab_size": 50272,
27
- "word_embed_proj_dim": 768
 
 
 
 
 
 
28
  }
 
6
  ],
7
  "attention_dropout": 0.0,
8
  "bos_token_id": 2,
9
+ "do_layer_norm_before": true
10
+ ,
11
  "dropout": 0.1,
12
  "enable_bias": true,
13
  "eos_token_id": 2,
 
25
  "transformers_version": "4.31.0",
26
  "use_cache": true,
27
  "vocab_size": 50272,
28
+ "word_embed_proj_dim": 768,
29
+ "auto_map":
30
+ {"AutoConfig": "configuration_opt.OPTConfig",
31
+ "AutoModel": "modeling_opt.OPTModel",
32
+ "AutoModelForCausalLM": "modeling_opt.OPTForCausalLM"
33
+
34
+ }
35
  }