{
"_name_or_path": "LongSafari/hyenadna-small-32k-seqlen-hf",
"activation_freq": 10,
"architectures": [
"HyenaDNAForSequenceClassification"
],
"auto_map": {
"AutoConfig": "LongSafari/hyenadna-small-32k-seqlen-hf--configuration_hyena.HyenaConfig",
"AutoModel": "LongSafari/hyenadna-small-32k-seqlen-hf--modeling_hyena.HyenaDNAModel",
"AutoModelForCausalLM": "LongSafari/hyenadna-small-32k-seqlen-hf--modeling_hyena.HyenaDNAForCausalLM",
"AutoModelForSequenceClassification": "LongSafari/hyenadna-small-32k-seqlen-hf--modeling_hyena.HyenaDNAForSequenceClassification"
},
"d_inner": 1024,
"d_model": 256,
"emb_dim": 5,
"embed_dropout": 0.1,
"filter_order": 64,
"hyena_dropout": 0.0,
"hyena_filter_dropout": 0.0,
"hyena_order": 2,
"initializer_range": 0.02,
"layer_norm_epsilon": 1e-05,
"max_seq_len": 32770,
"model_type": "hyenadna",
"n_layer": 4,
"num_inner_mlps": 2,
"pad_token_id": 4,
"pad_vocab_size_multiple": 8,
"problem_type": "single_label_classification",
"short_filter_order": 3,
"tie_word_embeddings": false,
"torch_dtype": "bfloat16",
"train_freq": true,
"transformers_version": "4.44.2",
"use_bias": true,
"vocab_size": 12
}