typeof committed on
Commit: b71a124
Parent: d80a211

Create model.layer_map

Files changed (1)
  1. model.layer_map +112 -0
model.layer_map ADDED
@@ -0,0 +1,112 @@
+ base_model.model.model.layers.0.mlp.down_proj.lora_A.weight
+ base_model.model.model.layers.0.mlp.down_proj.lora_B.weight
+ base_model.model.model.layers.0.mlp.gate_proj.lora_A.weight
+ base_model.model.model.layers.0.mlp.gate_proj.lora_B.weight
+ base_model.model.model.layers.0.mlp.up_proj.lora_A.weight
+ base_model.model.model.layers.0.mlp.up_proj.lora_B.weight
+ base_model.model.model.layers.0.self_attn.k_proj.lora_A.weight
+ base_model.model.model.layers.0.self_attn.k_proj.lora_B.weight
+ base_model.model.model.layers.0.self_attn.o_proj.lora_A.weight
+ base_model.model.model.layers.0.self_attn.o_proj.lora_B.weight
+ base_model.model.model.layers.0.self_attn.q_proj.lora_A.weight
+ base_model.model.model.layers.0.self_attn.q_proj.lora_B.weight
+ base_model.model.model.layers.0.self_attn.v_proj.lora_A.weight
+ base_model.model.model.layers.0.self_attn.v_proj.lora_B.weight
+ base_model.model.model.layers.1.mlp.down_proj.lora_A.weight
+ base_model.model.model.layers.1.mlp.down_proj.lora_B.weight
+ base_model.model.model.layers.1.mlp.gate_proj.lora_A.weight
+ base_model.model.model.layers.1.mlp.gate_proj.lora_B.weight
+ base_model.model.model.layers.1.mlp.up_proj.lora_A.weight
+ base_model.model.model.layers.1.mlp.up_proj.lora_B.weight
+ base_model.model.model.layers.1.self_attn.k_proj.lora_A.weight
+ base_model.model.model.layers.1.self_attn.k_proj.lora_B.weight
+ base_model.model.model.layers.1.self_attn.o_proj.lora_A.weight
+ base_model.model.model.layers.1.self_attn.o_proj.lora_B.weight
+ base_model.model.model.layers.1.self_attn.q_proj.lora_A.weight
+ base_model.model.model.layers.1.self_attn.q_proj.lora_B.weight
+ base_model.model.model.layers.1.self_attn.v_proj.lora_A.weight
+ base_model.model.model.layers.1.self_attn.v_proj.lora_B.weight
+ base_model.model.model.layers.2.mlp.down_proj.lora_A.weight
+ base_model.model.model.layers.2.mlp.down_proj.lora_B.weight
+ base_model.model.model.layers.2.mlp.gate_proj.lora_A.weight
+ base_model.model.model.layers.2.mlp.gate_proj.lora_B.weight
+ base_model.model.model.layers.2.mlp.up_proj.lora_A.weight
+ base_model.model.model.layers.2.mlp.up_proj.lora_B.weight
+ base_model.model.model.layers.2.self_attn.k_proj.lora_A.weight
+ base_model.model.model.layers.2.self_attn.k_proj.lora_B.weight
+ base_model.model.model.layers.2.self_attn.o_proj.lora_A.weight
+ base_model.model.model.layers.2.self_attn.o_proj.lora_B.weight
+ base_model.model.model.layers.2.self_attn.q_proj.lora_A.weight
+ base_model.model.model.layers.2.self_attn.q_proj.lora_B.weight
+ base_model.model.model.layers.2.self_attn.v_proj.lora_A.weight
+ base_model.model.model.layers.2.self_attn.v_proj.lora_B.weight
+ base_model.model.model.layers.3.mlp.down_proj.lora_A.weight
+ base_model.model.model.layers.3.mlp.down_proj.lora_B.weight
+ base_model.model.model.layers.3.mlp.gate_proj.lora_A.weight
+ base_model.model.model.layers.3.mlp.gate_proj.lora_B.weight
+ base_model.model.model.layers.3.mlp.up_proj.lora_A.weight
+ base_model.model.model.layers.3.mlp.up_proj.lora_B.weight
+ base_model.model.model.layers.3.self_attn.k_proj.lora_A.weight
+ base_model.model.model.layers.3.self_attn.k_proj.lora_B.weight
+ base_model.model.model.layers.3.self_attn.o_proj.lora_A.weight
+ base_model.model.model.layers.3.self_attn.o_proj.lora_B.weight
+ base_model.model.model.layers.3.self_attn.q_proj.lora_A.weight
+ base_model.model.model.layers.3.self_attn.q_proj.lora_B.weight
+ base_model.model.model.layers.3.self_attn.v_proj.lora_A.weight
+ base_model.model.model.layers.3.self_attn.v_proj.lora_B.weight
+ base_model.model.model.layers.4.mlp.down_proj.lora_A.weight
+ base_model.model.model.layers.4.mlp.down_proj.lora_B.weight
+ base_model.model.model.layers.4.mlp.gate_proj.lora_A.weight
+ base_model.model.model.layers.4.mlp.gate_proj.lora_B.weight
+ base_model.model.model.layers.4.mlp.up_proj.lora_A.weight
+ base_model.model.model.layers.4.mlp.up_proj.lora_B.weight
+ base_model.model.model.layers.4.self_attn.k_proj.lora_A.weight
+ base_model.model.model.layers.4.self_attn.k_proj.lora_B.weight
+ base_model.model.model.layers.4.self_attn.o_proj.lora_A.weight
+ base_model.model.model.layers.4.self_attn.o_proj.lora_B.weight
+ base_model.model.model.layers.4.self_attn.q_proj.lora_A.weight
+ base_model.model.model.layers.4.self_attn.q_proj.lora_B.weight
+ base_model.model.model.layers.4.self_attn.v_proj.lora_A.weight
+ base_model.model.model.layers.4.self_attn.v_proj.lora_B.weight
+ base_model.model.model.layers.5.mlp.down_proj.lora_A.weight
+ base_model.model.model.layers.5.mlp.down_proj.lora_B.weight
+ base_model.model.model.layers.5.mlp.gate_proj.lora_A.weight
+ base_model.model.model.layers.5.mlp.gate_proj.lora_B.weight
+ base_model.model.model.layers.5.mlp.up_proj.lora_A.weight
+ base_model.model.model.layers.5.mlp.up_proj.lora_B.weight
+ base_model.model.model.layers.5.self_attn.k_proj.lora_A.weight
+ base_model.model.model.layers.5.self_attn.k_proj.lora_B.weight
+ base_model.model.model.layers.5.self_attn.o_proj.lora_A.weight
+ base_model.model.model.layers.5.self_attn.o_proj.lora_B.weight
+ base_model.model.model.layers.5.self_attn.q_proj.lora_A.weight
+ base_model.model.model.layers.5.self_attn.q_proj.lora_B.weight
+ base_model.model.model.layers.5.self_attn.v_proj.lora_A.weight
+ base_model.model.model.layers.5.self_attn.v_proj.lora_B.weight
+ base_model.model.model.layers.6.mlp.down_proj.lora_A.weight
+ base_model.model.model.layers.6.mlp.down_proj.lora_B.weight
+ base_model.model.model.layers.6.mlp.gate_proj.lora_A.weight
+ base_model.model.model.layers.6.mlp.gate_proj.lora_B.weight
+ base_model.model.model.layers.6.mlp.up_proj.lora_A.weight
+ base_model.model.model.layers.6.mlp.up_proj.lora_B.weight
+ base_model.model.model.layers.6.self_attn.k_proj.lora_A.weight
+ base_model.model.model.layers.6.self_attn.k_proj.lora_B.weight
+ base_model.model.model.layers.6.self_attn.o_proj.lora_A.weight
+ base_model.model.model.layers.6.self_attn.o_proj.lora_B.weight
+ base_model.model.model.layers.6.self_attn.q_proj.lora_A.weight
+ base_model.model.model.layers.6.self_attn.q_proj.lora_B.weight
+ base_model.model.model.layers.6.self_attn.v_proj.lora_A.weight
+ base_model.model.model.layers.6.self_attn.v_proj.lora_B.weight
+ base_model.model.model.layers.7.mlp.down_proj.lora_A.weight
+ base_model.model.model.layers.7.mlp.down_proj.lora_B.weight
+ base_model.model.model.layers.7.mlp.gate_proj.lora_A.weight
+ base_model.model.model.layers.7.mlp.gate_proj.lora_B.weight
+ base_model.model.model.layers.7.mlp.up_proj.lora_A.weight
+ base_model.model.model.layers.7.mlp.up_proj.lora_B.weight
+ base_model.model.model.layers.7.self_attn.k_proj.lora_A.weight
+ base_model.model.model.layers.7.self_attn.k_proj.lora_B.weight
+ base_model.model.model.layers.7.self_attn.o_proj.lora_A.weight
+ base_model.model.model.layers.7.self_attn.o_proj.lora_B.weight
+ base_model.model.model.layers.7.self_attn.q_proj.lora_A.weight
+ base_model.model.model.layers.7.self_attn.q_proj.lora_B.weight
+ base_model.model.model.layers.7.self_attn.v_proj.lora_A.weight
+ base_model.model.model.layers.7.self_attn.v_proj.lora_B.weight
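
Note: the file added above appears to enumerate the LoRA adapter weights (lora_A / lora_B) attached to the attention and MLP projections of layers 0 through 7. As a minimal sketch, assuming the map is simply the sorted list of LoRA parameter names from a PEFT-wrapped causal LM, it could be regenerated roughly as follows; the base-model path, rank, and target modules are placeholders, not taken from this repository, and newer PEFT versions also insert the adapter name (for example ".default.") into these parameter names.

    # A minimal sketch, not the actual script used in this repository.
    # The model path and LoRA settings below are placeholders.
    from transformers import AutoModelForCausalLM
    from peft import LoraConfig, get_peft_model

    base = AutoModelForCausalLM.from_pretrained("path/to/base-model")  # placeholder
    lora_cfg = LoraConfig(
        r=8,  # placeholder rank
        target_modules=["q_proj", "k_proj", "v_proj", "o_proj",
                        "gate_proj", "up_proj", "down_proj"],
    )
    model = get_peft_model(base, lora_cfg)

    # Collect every LoRA A/B weight name; sorting reproduces the
    # mlp-before-self_attn, per-layer ordering seen in the file above.
    names = [n for n, _ in model.named_parameters()
             if "lora_A" in n or "lora_B" in n]
    with open("model.layer_map", "w") as f:
        f.write("\n".join(sorted(names)) + "\n")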