based-360M-32k / config.json — uploaded by elephantmipt (commit ab95ff6, verified)
{
"activation_function": "swiglu",
"alt_mixer": {
"_target_": "aicl.model.models.mixers.linear_attention.LinearAttention",
"feature_dim": 16,
"feature_map": {
"_target_": "aicl.model.models.mixers.linear_attention.TaylorExp",
"input_dim": 16
},
"l_max": 32768,
"num_heads": 16
},
"alt_mixer_2": {
"_target_": "aicl.model.models.mixers.slide_attention.SlidingAttention",
"causal": true,
"num_heads": 16,
"window_size": 128
},
"alt_mixer_2_layers": [
2,
7,
12,
17,
22
],
"alt_mixer_layers": [
1,
6,
11,
16,
21
],
"attn_pdrop": 0,
"bos_token_id": 50256,
"embd_pdrop": 0,
"eos_token_id": 50256,
"fused_bias_fc": true,
"fused_dropout_add_ln": true,
"fused_mlp": false,
"initializer_range": 0.02,
"layer_norm_epsilon": 1e-05,
"mixer": {
"_target_": "aicl.model.models.mixers.convolution.BaseConv",
"expand_proj": 4,
"kernel_sizes": 3,
"l_max": 32768,
"use_bias": true
},
"mlp_fc1_bias": false,
"mlp_fc2_bias": false,
"model_type": "gpt2",
"n_embd": 1024,
"n_head": 16,
"n_inner": 2048,
"n_layer": 27,
"n_positions": 0,
"out_proj_bias": false,
"pad_vocab_size_multiple": 8,
"qkv_proj_bias": false,
"reorder_and_upcast_attn": false,
"resid_pdrop": 0,
"residual_in_fp32": true,
"rms_norm": true,
"rotary_emb_fraction": 1,
"scale_attn_by_inverse_layer_idx": false,
"scale_attn_weights": true,
"special_initializer": true,
"summary_activation": null,
"summary_first_dropout": 0.1,
"summary_proj_to_labels": true,
"summary_type": "cls_index",
"summary_use_proj": true,
"transformers_version": "4.38.2",
"use_cache": true,
"use_flash_attn": true,
"vocab_size": 50280
}