{
    "module": "keras_hub.src.models.gemma.gemma_backbone",
    "class_name": "GemmaBackbone",
    "config": {
        "name": "gemma_backbone",
        "trainable": true,
        "vocabulary_size": 256000,
        "num_layers": 18,
        "num_query_heads": 8,
        "num_key_value_heads": 1,
        "hidden_dim": 2048,
        "intermediate_dim": 32768,
        "head_dim": 256,
        "layer_norm_epsilon": 1e-06,
        "dropout": 0,
        "query_head_dim_normalize": true,
        "use_post_ffw_norm": false,
        "use_post_attention_norm": false,
        "final_logit_soft_cap": null,
        "attention_logit_soft_cap": null,
        "sliding_window_size": 4096,
        "use_sliding_window_attention": false
    },
    "registered_name": "keras_hub>GemmaBackbone"
}
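
This is the Keras serialization spec for a keras_hub GemmaBackbone, consistent with the Gemma 2B architecture: 18 transformer layers, hidden_dim 2048, and multi-query attention (8 query heads sharing a single key/value head). As a minimal sketch, assuming the file is saved locally as config.json and keras_hub is installed, the architecture can be rebuilt from the inner "config" dict. Note that this produces a randomly initialized model; pretrained weights are not part of this file and must be loaded separately.

    import json

    import keras_hub

    # Read the serialized spec (the local file name "config.json" is an
    # assumption for this sketch).
    with open("config.json") as f:
        spec = json.load(f)

    # Rebuild the architecture from the inner "config" dict. Weights are
    # not included in the spec, so this backbone starts from random
    # initialization.
    backbone = keras_hub.models.GemmaBackbone.from_config(spec["config"])
    backbone.summary()

Because GemmaBackbone is registered with Keras under "keras_hub>GemmaBackbone" (the "registered_name" field above), the full spec can alternatively be passed to keras.saving.deserialize_keras_object, provided keras_hub has been imported first so the class registration runs.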