hyenadna-tiny / config.json
ZhiyuanChen's picture
Upload converted HyenaDNA-tiny (16k sequence length, hidden size 128)
c03a9bd verified
{
"activation_freq": 10,
"architectures": [
"HyenaDnaForCausalLM"
],
"bos_token_id": 1,
"dtype": "float32",
"embedding_dropout": 0.1,
"eos_token_id": 2,
"filter_dropout": 0.0,
"filter_emb_dim": 5,
"filter_order": 64,
"head": null,
"hidden_dropout": 0.0,
"hidden_size": 128,
"hyena_order": 2,
"id2label": {
"0": "LABEL_0"
},
"initializer_range": 0.02,
"intermediate_size": 512,
"label2id": {
"LABEL_0": 0
},
"layer_norm_eps": 1e-05,
"mask_token_id": 4,
"max_position_embeddings": 16386,
"model_type": "hyenadna",
"null_token_id": 5,
"num_hidden_layers": 2,
"num_inner_mlps": 2,
"pad_token_id": 0,
"pad_vocab_size_multiple": 8,
"short_filter_order": 3,
"tie_word_embeddings": true,
"train_freq": true,
"transformers_version": "5.2.0",
"unk_token_id": 3,
"use_bias": true,
"vocab_size": 11
}