{
    "model_parameters": {
        "n_layers": 16,
        "vocab_size": 128256,
        "embed_dim": 2048,
        "ffn_hidden_dim": 8192,
        "head_dim": 64,
        "n_kv_heads": 8,
        "rope_theta": 500000.0,
        "rms_norm_eps": 1e-05,
        "attention_mask_value": -100000.0,
        "tie_embedding": true
    },
    "qnn_parameters": {
        "n_hvx_threads": 4
    },
    "graphs": [
        {
            "type": "transformers",
            "start_layer_id": 0,
            "end_layer_id": 16,
            "batch_size": 1,
            "context_size": 2048,
            "cache_size": 1920,
            "graph_name": "batch_1",
            "model_path": "llama3_2_1b.bin",
            "kv_path_format": "kv/layer_{layer_id}_{kv_type}_{head_id}.raw",
            "kv_size": 13,
            "x_name": "x",
            "out_name": "out"
        },
        {
            "type": "transformers",
            "start_layer_id": 0,
            "end_layer_id": 16,
            "batch_size": 128,
            "context_size": 2048,
            "cache_size": 1920,
            "graph_name": "batch_128",
            "model_path": "llama3_2_1b.bin",
            "kv_path_format": "kv/layer_{layer_id}_{kv_type}_{head_id}.raw",
            "kv_size": 13,
            "x_name": "x",
            "out_name": "out"
        }
    ],
    "embeddings": [
        {
            "graph_name": "batch_1",
            "model_path": "lm_head.bin",
            "batch_size": 1,
            "input_type": 562,
            "input_dim": 2048,
            "output_dim": 128256,
            "x_name": "x",
            "out_name": "logits"
        },
        {
            "graph_name": "batch_128",
            "model_path": "lm_head.bin",
            "batch_size": 128,
            "input_type": 562,
            "input_dim": 2048,
            "output_dim": 128256,
            "x_name": "x",
            "out_name": "logits"
        }
    ]
}