exnx committed
Commit 49ba18f (1 parent: 7da4d1b)

update tiny config

Files changed (1):
config.json (+26 -24)
config.json CHANGED
@@ -1,26 +1,28 @@
 {
-  "model_type": "hyenadna",
-  "d_model": 256,
-  "n_layer": 2,
-  "d_inner": 1024,
-  "vocab_size": 12,
-  "resid_dropout": 0.0,
-  "embed_dropout": 0.1,
-  "fused_mlp": false,
-  "fused_dropout_add_ln": true,
-  "residual_in_fp32": true,
-  "pad_vocab_size_multiple": 8,
-  "return_hidden_state": true,
-  "layer": {
-    "_name_": "hyena",
-    "emb_dim": 5,
-    "filter_order": 64,
-    "local_order": 3,
-    "l_max": 1026,
-    "modulate": true,
-    "w": 10,
-    "lr": 6e-4,
-    "wd": 0.0,
-    "lr_pos_emb": 0.0
-  }
+  "use_head": true,
+  "n_classes": 2,
+  "d_model": 128,
+  "n_layer": 2,
+  "d_inner": 512,
+  "vocab_size": 12,
+  "resid_dropout": 0.0,
+  "embed_dropout": 0.1,
+  "fused_mlp": false,
+  "fused_dropout_add_ln": true,
+  "residual_in_fp32": true,
+  "pad_vocab_size_multiple": 8,
+  "return_hidden_state": true,
+  "layer": {
+    "_name_": "hyena",
+    "d_model": 128,
+    "emb_dim": 5,
+    "filter_order": 64,
+    "local_order": 3,
+    "l_max": 1026,
+    "modulate": true,
+    "w": 10,
+    "lr": 6e-4,
+    "wd": 0.0,
+    "lr_pos_emb": 0.0
+  }
 }
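
Note that the commit also drops the top-level "model_type" entry, so a plain JSON load is the safest way to sanity-check the new file rather than relying on AutoConfig dispatch. Below is a minimal Python sketch, assuming config.json from this commit has been downloaded into the working directory; the file path and the padding arithmetic at the end are illustrative assumptions, not part of the commit itself.

import json

# Load the updated tiny config (path is an assumption; adjust to your checkout).
with open("config.json") as f:
    cfg = json.load(f)

# New in this commit: a classification head with 2 output classes.
assert cfg["use_head"] is True
assert cfg["n_classes"] == 2

# Width shrank from 256 to 128 (d_inner from 1024 to 512), and the
# nested Hyena layer config now carries its own matching d_model.
assert cfg["d_model"] == 128
assert cfg["layer"]["d_model"] == cfg["d_model"]

# Effective embedding rows: vocab_size rounded up to a multiple of
# pad_vocab_size_multiple (12 -> 16 with a multiple of 8).
pad = cfg["pad_vocab_size_multiple"]
padded_vocab = -(-cfg["vocab_size"] // pad) * pad  # ceiling division
print(f"padded vocab size: {padded_vocab}")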