exnx committed
Commit: 691f36e
Parent: 7da4d1b

update tiny config

Files changed (1): config.json (+23, -24)
config.json CHANGED
```diff
@@ -1,26 +1,25 @@
 {
-  "model_type": "hyenadna",
-  "d_model": 256,
-  "n_layer": 2,
-  "d_inner": 1024,
-  "vocab_size": 12,
-  "resid_dropout": 0.0,
-  "embed_dropout": 0.1,
-  "fused_mlp": false,
-  "fused_dropout_add_ln": true,
-  "residual_in_fp32": true,
-  "pad_vocab_size_multiple": 8,
-  "return_hidden_state": true,
-  "layer": {
-    "_name_": "hyena",
-    "emb_dim": 5,
-    "filter_order": 64,
-    "local_order": 3,
-    "l_max": 1026,
-    "modulate": true,
-    "w": 10,
-    "lr": 6e-4,
-    "wd": 0.0,
-    "lr_pos_emb": 0.0
-  }
+  "d_model": 128,
+  "n_layer": 2,
+  "d_inner": 512,
+  "vocab_size": 12,
+  "resid_dropout": 0.0,
+  "embed_dropout": 0.1,
+  "fused_mlp": false,
+  "fused_dropout_add_ln": true,
+  "residual_in_fp32": true,
+  "pad_vocab_size_multiple": 8,
+  "return_hidden_state": true,
+  "layer": {
+    "_name_": "hyena",
+    "emb_dim": 5,
+    "filter_order": 64,
+    "local_order": 3,
+    "l_max": 1026,
+    "modulate": true,
+    "w": 10,
+    "lr": 6e-4,
+    "wd": 0.0,
+    "lr_pos_emb": 0.0
+  }
 }
```
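
For reference, a minimal sketch of fetching the config at this exact commit and checking the updated values. The repo id `user/hyenadna-tiny` is a placeholder, since the repository isn't named on this page; the revision is the commit hash above.

```python
# Minimal sketch: download config.json pinned to this commit and inspect
# the values changed here. The repo id is a placeholder -- substitute the
# actual repository this commit belongs to.
import json

from huggingface_hub import hf_hub_download

path = hf_hub_download(
    repo_id="user/hyenadna-tiny",  # placeholder repo id
    filename="config.json",
    revision="691f36e",            # the commit shown above
)
with open(path) as f:
    config = json.load(f)

print(config["d_model"])          # 128 after this commit (was 256)
print(config["d_inner"])          # 512 after this commit (was 1024)
print(config["layer"]["_name_"])  # "hyena"
```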