PerryCheng614
committed on
Upload folder using huggingface_hub
- config.json +62 -0
- model.safetensors +3 -0
- quant_log.csv +113 -0
- quantize_config.json +21 -0
config.json
ADDED
@@ -0,0 +1,62 @@
+{
+  "_attn_implementation_autoset": true,
+  "_name_or_path": "meta-llama/Llama-3.2-1B-Instruct",
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 128000,
+  "eos_token_id": [
+    128001,
+    128008,
+    128009
+  ],
+  "head_dim": 64,
+  "hidden_act": "silu",
+  "hidden_size": 2048,
+  "initializer_range": 0.02,
+  "intermediate_size": 8192,
+  "max_position_embeddings": 131072,
+  "mlp_bias": false,
+  "model_type": "llama",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 16,
+  "num_key_value_heads": 8,
+  "pretraining_tp": 1,
+  "quantization_config": {
+    "bits": 4,
+    "checkpoint_format": "gptq",
+    "desc_act": true,
+    "dynamic": null,
+    "group_size": 32,
+    "lm_head": false,
+    "meta": {
+      "damp_auto_increment": 0.0025,
+      "damp_percent": 0.01,
+      "mse": 0.0,
+      "quantizer": [
+        "gptqmodel:1.4.6-dev"
+      ],
+      "static_groups": false,
+      "true_sequential": true,
+      "uri": "https://github.com/modelcloud/gptqmodel"
+    },
+    "quant_method": "gptq",
+    "sym": true
+  },
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": {
+    "factor": 32.0,
+    "high_freq_factor": 4.0,
+    "low_freq_factor": 1.0,
+    "original_max_position_embeddings": 8192,
+    "rope_type": "llama3"
+  },
+  "rope_theta": 500000.0,
+  "tie_word_embeddings": true,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.47.1",
+  "use_cache": true,
+  "vocab_size": 128256
+}
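
Since `quantization_config` declares `quant_method: "gptq"` with `checkpoint_format: "gptq"` and quantizer `gptqmodel:1.4.6-dev`, the checkpoint should load directly with the GPTQModel library. A minimal sketch, assuming the files in this commit have been downloaded to a local folder (the path below is a placeholder, not a confirmed repo id):

```python
# Minimal loading sketch; "./Llama-3.2-1B-Instruct-gptq-4bit" is a
# placeholder for a local copy of this repo.
from gptqmodel import GPTQModel

model = GPTQModel.load("./Llama-3.2-1B-Instruct-gptq-4bit")

# generate() accepts a prompt string; decode the returned token ids.
result = model.generate("Uncovering deep insights begins with")[0]
print(model.tokenizer.decode(result))
```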
model.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:62814e6a3861364aeb6c9f0409990c2a72817f8cd8fd72c498b0ea637523673f
+size 1614733472
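
This is a Git LFS pointer, not the weights themselves; the actual ~1.6 GB safetensors blob is fetched on checkout. A small sketch for verifying a downloaded copy against the sha256 recorded in the pointer:

```python
# Verify a local download of model.safetensors against the LFS oid above.
import hashlib

h = hashlib.sha256()
with open("model.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)
print(h.hexdigest() ==
      "62814e6a3861364aeb6c9f0409990c2a72817f8cd8fd72c498b0ea637523673f")
```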
quant_log.csv
ADDED
@@ -0,0 +1,113 @@
+layer,module,loss,damp,time
+0,self_attn.k_proj,0.02168,0.01000,0.732
+0,self_attn.v_proj,0.00053,0.01000,0.632
+0,self_attn.q_proj,0.04424,0.01000,0.614
+0,self_attn.o_proj,0.00005,0.01000,0.607
+0,mlp.up_proj,0.03671,0.01000,0.636
+0,mlp.gate_proj,0.04622,0.01000,0.776
+0,mlp.down_proj,0.00027,0.01000,2.563
+1,self_attn.k_proj,0.03438,0.01000,0.654
+1,self_attn.v_proj,0.00195,0.01000,0.643
+1,self_attn.q_proj,0.06312,0.01000,0.658
+1,self_attn.o_proj,0.00014,0.01000,0.665
+1,mlp.up_proj,0.05251,0.01000,0.660
+1,mlp.gate_proj,0.07196,0.01000,0.793
+1,mlp.down_proj,0.13592,0.01000,2.671
+2,self_attn.k_proj,0.06812,0.01000,0.635
+2,self_attn.v_proj,0.00459,0.01000,0.633
+2,self_attn.q_proj,0.13188,0.01000,0.648
+2,self_attn.o_proj,0.00021,0.01000,0.653
+2,mlp.up_proj,0.06672,0.01000,0.667
+2,mlp.gate_proj,0.10655,0.01000,0.795
+2,mlp.down_proj,0.00084,0.01000,2.664
+3,self_attn.k_proj,0.04493,0.01000,0.652
+3,self_attn.v_proj,0.00586,0.01000,0.620
+3,self_attn.q_proj,0.09710,0.01000,0.623
+3,self_attn.o_proj,0.00041,0.01000,0.633
+3,mlp.up_proj,0.08134,0.01000,0.654
+3,mlp.gate_proj,0.16285,0.01000,0.647
+3,mlp.down_proj,0.00128,0.01000,2.671
+4,self_attn.k_proj,0.04807,0.01000,0.604
+4,self_attn.v_proj,0.00549,0.01000,0.591
+4,self_attn.q_proj,0.09806,0.01000,0.611
+4,self_attn.o_proj,0.00059,0.01000,0.626
+4,mlp.up_proj,0.08266,0.01000,0.643
+4,mlp.gate_proj,0.17954,0.01000,0.627
+4,mlp.down_proj,0.00149,0.01000,2.526
+5,self_attn.k_proj,0.07132,0.01000,0.603
+5,self_attn.v_proj,0.00474,0.01000,0.623
+5,self_attn.q_proj,0.12509,0.01000,0.610
+5,self_attn.o_proj,0.00061,0.01000,0.634
+5,mlp.up_proj,0.09010,0.01000,0.655
+5,mlp.gate_proj,0.16730,0.01000,0.623
+5,mlp.down_proj,0.00180,0.01000,2.580
+6,self_attn.k_proj,0.05819,0.01000,0.615
+6,self_attn.v_proj,0.00612,0.01000,0.594
+6,self_attn.q_proj,0.09133,0.01000,0.603
+6,self_attn.o_proj,0.00093,0.01000,0.615
+6,mlp.up_proj,0.09097,0.01000,0.661
+6,mlp.gate_proj,0.16461,0.01000,0.752
+6,mlp.down_proj,0.00181,0.01000,2.506
+7,self_attn.k_proj,0.05784,0.01000,0.603
+7,self_attn.v_proj,0.00698,0.01000,0.590
+7,self_attn.q_proj,0.10795,0.01000,0.609
+7,self_attn.o_proj,0.00090,0.01000,0.631
+7,mlp.up_proj,0.09403,0.01000,0.653
+7,mlp.gate_proj,0.15263,0.01000,0.650
+7,mlp.down_proj,0.00196,0.01000,2.499
+8,self_attn.k_proj,0.06733,0.01000,0.591
+8,self_attn.v_proj,0.00663,0.01000,0.594
+8,self_attn.q_proj,0.10911,0.01000,0.611
+8,self_attn.o_proj,0.00114,0.01000,0.620
+8,mlp.up_proj,0.10367,0.01000,0.642
+8,mlp.gate_proj,0.16448,0.01000,0.628
+8,mlp.down_proj,0.00263,0.01000,2.473
+9,self_attn.k_proj,0.05555,0.01000,0.597
+9,self_attn.v_proj,0.00782,0.01000,0.598
+9,self_attn.q_proj,0.13325,0.01000,0.625
+9,self_attn.o_proj,0.00155,0.01000,0.632
+9,mlp.up_proj,0.11070,0.01000,0.666
+9,mlp.gate_proj,0.18121,0.01000,0.649
+9,mlp.down_proj,0.00325,0.01000,2.563
+10,self_attn.k_proj,0.06845,0.01000,0.592
+10,self_attn.v_proj,0.00844,0.01000,0.592
+10,self_attn.q_proj,0.13737,0.01000,0.607
+10,self_attn.o_proj,0.00105,0.01000,0.600
+10,mlp.up_proj,0.13314,0.01000,0.625
+10,mlp.gate_proj,0.21042,0.01000,0.613
+10,mlp.down_proj,0.00412,0.01000,2.506
+11,self_attn.k_proj,0.08247,0.01000,0.590
+11,self_attn.v_proj,0.00849,0.01000,0.594
+11,self_attn.q_proj,0.13637,0.01000,0.599
+11,self_attn.o_proj,0.00090,0.01000,0.601
+11,mlp.up_proj,0.14766,0.01000,0.640
+11,mlp.gate_proj,0.22769,0.01000,0.632
+11,mlp.down_proj,0.00453,0.01000,2.479
+12,self_attn.k_proj,0.08029,0.01000,0.581
+12,self_attn.v_proj,0.00880,0.01000,0.579
+12,self_attn.q_proj,0.13529,0.01000,0.603
+12,self_attn.o_proj,0.00088,0.01000,0.585
+12,mlp.up_proj,0.15437,0.01000,0.612
+12,mlp.gate_proj,0.22473,0.01000,0.610
+12,mlp.down_proj,0.00506,0.01000,2.522
+13,self_attn.k_proj,0.07901,0.01000,0.610
+13,self_attn.v_proj,0.01435,0.01000,0.578
+13,self_attn.q_proj,0.15220,0.01000,0.602
+13,self_attn.o_proj,0.00121,0.01000,0.613
+13,mlp.up_proj,0.18113,0.01000,0.642
+13,mlp.gate_proj,0.24019,0.01000,0.611
+13,mlp.down_proj,0.00736,0.01000,2.476
+14,self_attn.k_proj,0.08390,0.01000,0.579
+14,self_attn.v_proj,0.03109,0.01000,0.576
+14,self_attn.q_proj,0.16015,0.01000,0.601
+14,self_attn.o_proj,0.00296,0.01000,0.600
+14,mlp.up_proj,0.20902,0.01000,0.663
+14,mlp.gate_proj,0.29858,0.01000,0.608
+14,mlp.down_proj,0.00978,0.01000,2.525
+15,self_attn.k_proj,0.07619,0.01000,0.582
+15,self_attn.v_proj,0.03300,0.01000,0.577
+15,self_attn.q_proj,0.14672,0.01000,0.598
+15,self_attn.o_proj,0.00982,0.01000,0.612
+15,mlp.up_proj,0.25285,0.01000,0.633
+15,mlp.gate_proj,0.33463,0.01000,0.617
+15,mlp.down_proj,0.02326,0.01000,2.467
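
Each row records the per-module quantization loss, damping factor, and wall-clock seconds for one GPTQ step. Losses grow toward later layers (e.g. `mlp.gate_proj` climbs from 0.046 at layer 0 to 0.335 at layer 15), which is the usual pattern of error accumulating through the sequential quantization. A quick sketch, assuming pandas, for summarizing the log:

```python
# Summarize quant_log.csv: mean loss per module type and total runtime.
import pandas as pd

log = pd.read_csv("quant_log.csv")
print(log.groupby("module")["loss"].mean().sort_values(ascending=False))
print(f"total quantization time: {log['time'].sum():.1f}s")
```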
quantize_config.json
ADDED
@@ -0,0 +1,21 @@
+{
+  "bits": 4,
+  "dynamic": null,
+  "group_size": 32,
+  "desc_act": true,
+  "sym": true,
+  "lm_head": false,
+  "quant_method": "gptq",
+  "checkpoint_format": "gptq",
+  "meta": {
+    "quantizer": [
+      "gptqmodel:1.4.6-dev"
+    ],
+    "uri": "https://github.com/modelcloud/gptqmodel",
+    "damp_percent": 0.01,
+    "damp_auto_increment": 0.0025,
+    "static_groups": false,
+    "true_sequential": true,
+    "mse": 0.0
+  }
+}
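
These are the same settings echoed under `quantization_config` in config.json. A hedged sketch of how they map onto GPTQModel's `QuantizeConfig` if one wanted to reproduce the quantization; the calibration texts below are placeholders, since the data actually used is not recorded in this commit:

```python
# Reproduction sketch under the settings above; calibration texts are
# placeholders, not the dataset used for this checkpoint.
from gptqmodel import GPTQModel, QuantizeConfig

quant_config = QuantizeConfig(bits=4, group_size=32, desc_act=True, sym=True)
model = GPTQModel.load("meta-llama/Llama-3.2-1B-Instruct", quant_config)
model.quantize([
    "Placeholder calibration text one.",
    "Placeholder calibration text two.",
])
model.save("Llama-3.2-1B-Instruct-gptq-4bit")
```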