zpn committed
Commit 02d9672
1 Parent(s): 4f15897

Update config.json

Files changed (1):
config.json +3 -3
config.json CHANGED
@@ -3,7 +3,7 @@
   "architectures": [
     "NomicBertModel"
   ],
-  "attn_pdrop": 0.1,
+  "attn_pdrop": 0.0,
   "auto_map": {
     "AutoConfig": "configuration_hf_nomic_bert.NomicBertConfig",
     "AutoModel": "modeling_hf_nomic_bert.NomicBertModel",
@@ -12,7 +12,7 @@
   "bos_token_id": null,
   "causal": false,
   "dense_seq_output": true,
-  "embd_pdrop": 0.1,
+  "embd_pdrop": 0.0,
   "eos_token_id": null,
   "fused_bias_fc": true,
   "fused_dropout_add_ln": true,
@@ -32,7 +32,7 @@
   "prenorm": false,
   "qkv_proj_bias": false,
   "reorder_and_upcast_attn": false,
-  "resid_pdrop": 0.1,
+  "resid_pdrop": 0.0,
   "rotary_emb_base": 1000,
   "rotary_emb_fraction": 1.0,
   "rotary_emb_interleaved": false,