Dshalem committed
Commit e0ab2c9
1 parent: b69b388

Upload model

brad/adapter_config.json CHANGED
@@ -22,134 +22,134 @@
  "rank_pattern": {},
  "revision": null,
  "target_modules": [
- "down_blocks.2.attentions.1.transformer_blocks.0.attn1.to_v",
- "down_blocks.0.attentions.1.transformer_blocks.0.attn1.to_q",
- "up_blocks.2.attentions.1.transformer_blocks.0.attn2.to_v",
- "down_blocks.2.attentions.0.transformer_blocks.0.attn1.to_out.0",
- "down_blocks.1.attentions.1.transformer_blocks.0.attn1.to_out.0",
- "up_blocks.2.attentions.2.transformer_blocks.0.attn1.to_out.0",
- "up_blocks.2.attentions.2.transformer_blocks.0.attn2.to_k",
- "up_blocks.3.attentions.1.transformer_blocks.0.attn2.to_k",
- "up_blocks.1.attentions.0.transformer_blocks.0.attn1.to_out.0",
- "up_blocks.2.attentions.0.transformer_blocks.0.attn1.to_out.0",
+ "down_blocks.0.attentions.0.transformer_blocks.0.attn1.to_out.0",
  "up_blocks.3.attentions.2.transformer_blocks.0.attn1.to_q",
- "up_blocks.1.attentions.2.transformer_blocks.0.attn1.to_q",
- "up_blocks.2.attentions.1.transformer_blocks.0.attn1.to_v",
- "up_blocks.3.attentions.2.transformer_blocks.0.attn2.to_out.0",
+ "up_blocks.2.attentions.2.transformer_blocks.0.attn2.to_out.0",
  "down_blocks.2.attentions.1.transformer_blocks.0.attn2.to_out.0",
- "mid_block.attentions.0.transformer_blocks.0.attn1.to_v",
- "down_blocks.2.attentions.1.transformer_blocks.0.attn2.to_k",
- "up_blocks.2.attentions.0.transformer_blocks.0.attn2.to_q",
- "up_blocks.2.attentions.0.transformer_blocks.0.attn1.to_v",
- "up_blocks.1.attentions.1.transformer_blocks.0.attn1.to_v",
- "up_blocks.2.attentions.2.transformer_blocks.0.attn2.to_v",
- "up_blocks.3.attentions.2.transformer_blocks.0.attn1.to_out.0",
- "down_blocks.0.attentions.0.transformer_blocks.0.attn1.to_out.0",
- "up_blocks.2.attentions.2.transformer_blocks.0.attn2.to_q",
- "up_blocks.1.attentions.1.transformer_blocks.0.attn2.to_v",
- "down_blocks.2.attentions.0.transformer_blocks.0.attn1.to_q",
- "down_blocks.1.attentions.0.transformer_blocks.0.attn2.to_out.0",
- "up_blocks.1.attentions.1.transformer_blocks.0.attn2.to_q",
- "down_blocks.0.attentions.1.transformer_blocks.0.attn2.to_k",
- "up_blocks.2.attentions.0.transformer_blocks.0.attn1.to_q",
  "up_blocks.2.attentions.2.transformer_blocks.0.attn1.to_v",
+ "down_blocks.2.attentions.0.transformer_blocks.0.attn1.to_out.0",
+ "up_blocks.2.attentions.2.transformer_blocks.0.attn1.to_q",
+ "up_blocks.2.attentions.2.transformer_blocks.0.attn2.to_v",
+ "up_blocks.1.attentions.1.transformer_blocks.0.attn1.to_out.0",
  "up_blocks.1.attentions.2.transformer_blocks.0.attn2.to_out.0",
- "up_blocks.3.attentions.0.transformer_blocks.0.attn2.to_k",
- "down_blocks.1.attentions.0.transformer_blocks.0.attn1.to_k",
- "up_blocks.2.attentions.1.transformer_blocks.0.attn1.to_k",
- "down_blocks.0.attentions.0.transformer_blocks.0.attn2.to_k",
- "up_blocks.1.attentions.1.transformer_blocks.0.attn1.to_k",
- "up_blocks.3.attentions.2.transformer_blocks.0.attn2.to_q",
- "up_blocks.3.attentions.1.transformer_blocks.0.attn1.to_v",
- "down_blocks.0.attentions.0.transformer_blocks.0.attn2.to_q",
+ "up_blocks.2.attentions.2.transformer_blocks.0.attn1.to_out.0",
+ "up_blocks.1.attentions.2.transformer_blocks.0.attn2.to_k",
+ "up_blocks.2.attentions.0.transformer_blocks.0.attn1.to_q",
+ "mid_block.attentions.0.transformer_blocks.0.attn2.to_out.0",
+ "down_blocks.1.attentions.1.transformer_blocks.0.attn2.to_out.0",
+ "mid_block.attentions.0.transformer_blocks.0.attn2.to_k",
+ "up_blocks.1.attentions.0.transformer_blocks.0.attn2.to_q",
+ "up_blocks.1.attentions.1.transformer_blocks.0.attn1.to_q",
  "up_blocks.3.attentions.0.transformer_blocks.0.attn2.to_out.0",
- "down_blocks.2.attentions.0.transformer_blocks.0.attn2.to_v",
- "down_blocks.2.attentions.1.transformer_blocks.0.attn1.to_q",
+ "down_blocks.0.attentions.0.transformer_blocks.0.attn2.to_v",
  "down_blocks.2.attentions.0.transformer_blocks.0.attn1.to_v",
- "down_blocks.1.attentions.1.transformer_blocks.0.attn2.to_q",
- "up_blocks.3.attentions.0.transformer_blocks.0.attn2.to_v",
  "mid_block.attentions.0.transformer_blocks.0.attn1.to_out.0",
- "down_blocks.2.attentions.0.transformer_blocks.0.attn2.to_q",
+ "up_blocks.2.attentions.0.transformer_blocks.0.attn2.to_out.0",
+ "up_blocks.1.attentions.0.transformer_blocks.0.attn1.to_v",
+ "up_blocks.3.attentions.1.transformer_blocks.0.attn2.to_out.0",
+ "down_blocks.2.attentions.0.transformer_blocks.0.attn2.to_k",
+ "up_blocks.1.attentions.2.transformer_blocks.0.attn1.to_out.0",
+ "up_blocks.3.attentions.2.transformer_blocks.0.attn2.to_q",
+ "up_blocks.3.attentions.1.transformer_blocks.0.attn2.to_k",
+ "up_blocks.3.attentions.1.transformer_blocks.0.attn2.to_q",
+ "down_blocks.0.attentions.1.transformer_blocks.0.attn2.to_v",
+ "mid_block.attentions.0.transformer_blocks.0.attn1.to_k",
+ "up_blocks.1.attentions.0.transformer_blocks.0.attn2.to_v",
+ "down_blocks.2.attentions.1.transformer_blocks.0.attn1.to_q",
+ "up_blocks.2.attentions.1.transformer_blocks.0.attn1.to_v",
+ "down_blocks.2.attentions.1.transformer_blocks.0.attn1.to_v",
+ "down_blocks.0.attentions.0.transformer_blocks.0.attn1.to_k",
+ "down_blocks.1.attentions.1.transformer_blocks.0.attn1.to_q",
+ "up_blocks.3.attentions.0.transformer_blocks.0.attn2.to_q",
+ "down_blocks.1.attentions.1.transformer_blocks.0.attn2.to_v",
+ "mid_block.attentions.0.transformer_blocks.0.attn1.to_v",
+ "up_blocks.2.attentions.2.transformer_blocks.0.attn2.to_k",
+ "down_blocks.1.attentions.0.transformer_blocks.0.attn1.to_v",
+ "up_blocks.1.attentions.0.transformer_blocks.0.attn2.to_k",
+ "down_blocks.1.attentions.1.transformer_blocks.0.attn2.to_k",
+ "down_blocks.1.attentions.0.transformer_blocks.0.attn1.to_out.0",
+ "up_blocks.3.attentions.1.transformer_blocks.0.attn1.to_k",
  "up_blocks.2.attentions.1.transformer_blocks.0.attn2.to_k",
- "up_blocks.1.attentions.0.transformer_blocks.0.attn1.to_q",
+ "down_blocks.2.attentions.1.transformer_blocks.0.attn1.to_k",
+ "up_blocks.1.attentions.0.transformer_blocks.0.attn1.to_out.0",
+ "up_blocks.2.attentions.2.transformer_blocks.0.attn1.to_k",
+ "up_blocks.1.attentions.1.transformer_blocks.0.attn2.to_k",
+ "down_blocks.2.attentions.0.transformer_blocks.0.attn2.to_out.0",
+ "up_blocks.1.attentions.2.transformer_blocks.0.attn1.to_v",
+ "up_blocks.1.attentions.1.transformer_blocks.0.attn1.to_k",
  "down_blocks.1.attentions.0.transformer_blocks.0.attn2.to_v",
- "up_blocks.1.attentions.0.transformer_blocks.0.attn1.to_k",
- "down_blocks.0.attentions.0.transformer_blocks.0.attn1.to_q",
- "down_blocks.2.attentions.0.transformer_blocks.0.attn1.to_k",
+ "up_blocks.1.attentions.2.transformer_blocks.0.attn2.to_q",
+ "down_blocks.0.attentions.1.transformer_blocks.0.attn2.to_out.0",
+ "up_blocks.3.attentions.1.transformer_blocks.0.attn1.to_v",
+ "up_blocks.2.attentions.1.transformer_blocks.0.attn1.to_out.0",
+ "down_blocks.0.attentions.1.transformer_blocks.0.attn1.to_out.0",
+ "down_blocks.0.attentions.1.transformer_blocks.0.attn1.to_k",
+ "down_blocks.0.attentions.0.transformer_blocks.0.attn1.to_v",
+ "up_blocks.3.attentions.0.transformer_blocks.0.attn1.to_out.0",
+ "down_blocks.1.attentions.0.transformer_blocks.0.attn1.to_k",
+ "up_blocks.2.attentions.1.transformer_blocks.0.attn2.to_out.0",
+ "up_blocks.1.attentions.0.transformer_blocks.0.attn1.to_q",
+ "down_blocks.1.attentions.0.transformer_blocks.0.attn1.to_q",
+ "down_blocks.1.attentions.1.transformer_blocks.0.attn1.to_v",
+ "up_blocks.3.attentions.0.transformer_blocks.0.attn1.to_v",
+ "down_blocks.2.attentions.0.transformer_blocks.0.attn2.to_q",
+ "up_blocks.3.attentions.0.transformer_blocks.0.attn2.to_v",
+ "down_blocks.0.attentions.0.transformer_blocks.0.attn2.to_q",
  "up_blocks.2.attentions.1.transformer_blocks.0.attn2.to_q",
- "up_blocks.3.attentions.2.transformer_blocks.0.attn2.to_v",
+ "up_blocks.3.attentions.0.transformer_blocks.0.attn2.to_k",
  "up_blocks.2.attentions.0.transformer_blocks.0.attn2.to_k",
- "up_blocks.3.attentions.0.transformer_blocks.0.attn1.to_v",
- "up_blocks.3.attentions.1.transformer_blocks.0.attn1.to_k",
- "up_blocks.1.attentions.1.transformer_blocks.0.attn2.to_out.0",
+ "up_blocks.1.attentions.1.transformer_blocks.0.attn2.to_q",
  "mid_block.attentions.0.transformer_blocks.0.attn2.to_q",
- "up_blocks.1.attentions.2.transformer_blocks.0.attn1.to_v",
+ "up_blocks.2.attentions.0.transformer_blocks.0.attn1.to_k",
  "up_blocks.3.attentions.1.transformer_blocks.0.attn2.to_v",
- "down_blocks.1.attentions.1.transformer_blocks.0.attn2.to_v",
- "up_blocks.2.attentions.0.transformer_blocks.0.attn2.to_v",
- "up_blocks.3.attentions.0.transformer_blocks.0.attn1.to_q",
- "down_blocks.2.attentions.1.transformer_blocks.0.attn1.to_out.0",
- "down_blocks.0.attentions.1.transformer_blocks.0.attn1.to_out.0",
- "down_blocks.2.attentions.0.transformer_blocks.0.attn2.to_k",
- "down_blocks.0.attentions.1.transformer_blocks.0.attn2.to_v",
- "down_blocks.1.attentions.0.transformer_blocks.0.attn1.to_v",
- "up_blocks.1.attentions.0.transformer_blocks.0.attn1.to_v",
- "up_blocks.2.attentions.2.transformer_blocks.0.attn1.to_k",
- "up_blocks.2.attentions.1.transformer_blocks.0.attn1.to_out.0",
- "up_blocks.1.attentions.0.transformer_blocks.0.attn2.to_k",
+ "down_blocks.0.attentions.1.transformer_blocks.0.attn1.to_v",
+ "up_blocks.3.attentions.0.transformer_blocks.0.attn1.to_k",
  "down_blocks.1.attentions.0.transformer_blocks.0.attn2.to_k",
- "up_blocks.3.attentions.1.transformer_blocks.0.attn1.to_out.0",
+ "down_blocks.0.attentions.1.transformer_blocks.0.attn1.to_q",
+ "down_blocks.2.attentions.0.transformer_blocks.0.attn1.to_k",
  "mid_block.attentions.0.transformer_blocks.0.attn2.to_v",
- "down_blocks.0.attentions.1.transformer_blocks.0.attn2.to_out.0",
- "down_blocks.1.attentions.0.transformer_blocks.0.attn1.to_q",
- "mid_block.attentions.0.transformer_blocks.0.attn1.to_q",
- "up_blocks.3.attentions.0.transformer_blocks.0.attn2.to_q",
- "down_blocks.1.attentions.1.transformer_blocks.0.attn1.to_q",
- "up_blocks.1.attentions.2.transformer_blocks.0.attn2.to_q",
- "up_blocks.2.attentions.1.transformer_blocks.0.attn1.to_q",
  "up_blocks.3.attentions.2.transformer_blocks.0.attn1.to_k",
- "down_blocks.2.attentions.1.transformer_blocks.0.attn1.to_k",
- "mid_block.attentions.0.transformer_blocks.0.attn2.to_out.0",
- "up_blocks.1.attentions.1.transformer_blocks.0.attn1.to_out.0",
- "up_blocks.3.attentions.0.transformer_blocks.0.attn1.to_k",
+ "down_blocks.1.attentions.1.transformer_blocks.0.attn1.to_out.0",
+ "up_blocks.3.attentions.2.transformer_blocks.0.attn2.to_v",
+ "down_blocks.2.attentions.1.transformer_blocks.0.attn1.to_out.0",
+ "down_blocks.1.attentions.0.transformer_blocks.0.attn2.to_q",
+ "up_blocks.2.attentions.1.transformer_blocks.0.attn1.to_q",
+ "up_blocks.2.attentions.0.transformer_blocks.0.attn1.to_v",
+ "down_blocks.0.attentions.1.transformer_blocks.0.attn2.to_k",
+ "down_blocks.1.attentions.0.transformer_blocks.0.attn2.to_out.0",
+ "mid_block.attentions.0.transformer_blocks.0.attn1.to_q",
  "up_blocks.1.attentions.0.transformer_blocks.0.attn2.to_out.0",
- "down_blocks.1.attentions.1.transformer_blocks.0.attn1.to_v",
- "up_blocks.2.attentions.0.transformer_blocks.0.attn2.to_out.0",
+ "up_blocks.3.attentions.0.transformer_blocks.0.attn1.to_q",
+ "up_blocks.3.attentions.2.transformer_blocks.0.attn2.to_out.0",
+ "up_blocks.1.attentions.1.transformer_blocks.0.attn2.to_out.0",
+ "down_blocks.0.attentions.1.transformer_blocks.0.attn2.to_q",
+ "up_blocks.2.attentions.1.transformer_blocks.0.attn1.to_k",
  "up_blocks.1.attentions.2.transformer_blocks.0.attn2.to_v",
- "up_blocks.3.attentions.0.transformer_blocks.0.attn1.to_out.0",
- "down_blocks.0.attentions.1.transformer_blocks.0.attn1.to_v",
- "down_blocks.1.attentions.0.transformer_blocks.0.attn1.to_out.0",
- "mid_block.attentions.0.transformer_blocks.0.attn1.to_k",
- "up_blocks.2.attentions.2.transformer_blocks.0.attn1.to_q",
- "up_blocks.1.attentions.2.transformer_blocks.0.attn1.to_out.0",
- "up_blocks.1.attentions.2.transformer_blocks.0.attn2.to_k",
- "down_blocks.0.attentions.0.transformer_blocks.0.attn2.to_v",
- "up_blocks.1.attentions.1.transformer_blocks.0.attn2.to_k",
- "down_blocks.0.attentions.0.transformer_blocks.0.attn2.to_out.0",
+ "up_blocks.1.attentions.1.transformer_blocks.0.attn1.to_v",
  "up_blocks.1.attentions.2.transformer_blocks.0.attn1.to_k",
- "down_blocks.0.attentions.1.transformer_blocks.0.attn2.to_q",
- "up_blocks.3.attentions.2.transformer_blocks.0.attn2.to_k",
- "mid_block.attentions.0.transformer_blocks.0.attn2.to_k",
- "up_blocks.1.attentions.0.transformer_blocks.0.attn2.to_q",
- "down_blocks.0.attentions.0.transformer_blocks.0.attn1.to_v",
- "up_blocks.2.attentions.0.transformer_blocks.0.attn1.to_k",
- "up_blocks.3.attentions.2.transformer_blocks.0.attn1.to_v",
+ "down_blocks.1.attentions.1.transformer_blocks.0.attn2.to_q",
+ "up_blocks.2.attentions.0.transformer_blocks.0.attn1.to_out.0",
+ "up_blocks.2.attentions.1.transformer_blocks.0.attn2.to_v",
+ "down_blocks.0.attentions.0.transformer_blocks.0.attn2.to_k",
+ "down_blocks.0.attentions.0.transformer_blocks.0.attn1.to_q",
+ "down_blocks.2.attentions.0.transformer_blocks.0.attn1.to_q",
  "up_blocks.3.attentions.1.transformer_blocks.0.attn1.to_q",
- "down_blocks.2.attentions.1.transformer_blocks.0.attn2.to_v",
- "down_blocks.1.attentions.1.transformer_blocks.0.attn2.to_k",
- "up_blocks.1.attentions.1.transformer_blocks.0.attn1.to_q",
- "up_blocks.1.attentions.0.transformer_blocks.0.attn2.to_v",
- "down_blocks.0.attentions.1.transformer_blocks.0.attn1.to_k",
- "up_blocks.3.attentions.1.transformer_blocks.0.attn2.to_out.0",
- "down_blocks.1.attentions.0.transformer_blocks.0.attn2.to_q",
  "down_blocks.2.attentions.1.transformer_blocks.0.attn2.to_q",
- "up_blocks.2.attentions.1.transformer_blocks.0.attn2.to_out.0",
- "down_blocks.0.attentions.0.transformer_blocks.0.attn1.to_k",
- "down_blocks.1.attentions.1.transformer_blocks.0.attn2.to_out.0",
- "up_blocks.3.attentions.1.transformer_blocks.0.attn2.to_q",
- "up_blocks.2.attentions.2.transformer_blocks.0.attn2.to_out.0",
+ "down_blocks.2.attentions.1.transformer_blocks.0.attn2.to_v",
+ "down_blocks.2.attentions.0.transformer_blocks.0.attn2.to_v",
+ "up_blocks.3.attentions.2.transformer_blocks.0.attn1.to_v",
+ "down_blocks.2.attentions.1.transformer_blocks.0.attn2.to_k",
+ "up_blocks.1.attentions.0.transformer_blocks.0.attn1.to_k",
+ "up_blocks.1.attentions.2.transformer_blocks.0.attn1.to_q",
+ "up_blocks.3.attentions.2.transformer_blocks.0.attn2.to_k",
+ "up_blocks.1.attentions.1.transformer_blocks.0.attn2.to_v",
  "down_blocks.1.attentions.1.transformer_blocks.0.attn1.to_k",
- "down_blocks.2.attentions.0.transformer_blocks.0.attn2.to_out.0"
+ "up_blocks.3.attentions.1.transformer_blocks.0.attn1.to_out.0",
+ "up_blocks.2.attentions.2.transformer_blocks.0.attn2.to_q",
+ "up_blocks.3.attentions.2.transformer_blocks.0.attn1.to_out.0",
+ "up_blocks.2.attentions.0.transformer_blocks.0.attn2.to_q",
+ "up_blocks.2.attentions.0.transformer_blocks.0.attn2.to_v",
+ "down_blocks.0.attentions.0.transformer_blocks.0.attn2.to_out.0"
  ],
  "task_type": null
  }
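The target_modules list spans the to_q/to_k/to_v/to_out.0 projections of attn1 (self-attention) and attn2 (cross-attention) in the UNet's down_blocks, mid_block, and up_blocks, i.e. LoRA over the attention layers of a Stable Diffusion-style UNet; the diff appears to be the same set of module names re-serialized in a different order (PEFT writes it from an unordered set), alongside the retrained weights below. A minimal loading sketch, assuming the adapter was saved with PEFT and the base is a Stable Diffusion v1.x UNet (the base checkpoint name below is an assumption; the commit does not state it):

```python
# Sketch only, not the author's confirmed workflow. Assumptions: the adapter
# was saved via peft's save_pretrained(), and the base UNet matches the one
# used for training ("runwayml/stable-diffusion-v1-5" is a guess).
from diffusers import UNet2DConditionModel
from peft import PeftModel

unet = UNet2DConditionModel.from_pretrained(
    "runwayml/stable-diffusion-v1-5", subfolder="unet"
)
# PeftModel reads brad/adapter_config.json and brad/adapter_model.safetensors
# and wraps the listed attention projections with LoRA layers.
unet = PeftModel.from_pretrained(unet, "brad")
```

Leaving task_type as null is consistent with wrapping a plain torch module such as a diffusers UNet rather than a transformers task head.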
brad/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:42a623e5009d2baf9923aebaf72a4962103e095f4113d6ce102f78c9cba61b17
+ oid sha256:929f12fefb40be4debb5bf9261020211a3da3cdbd13dcded8275759848392154
  size 12795512
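Only the Git LFS pointer changes for the weights file: a new sha256 oid at an unchanged size of 12795512 bytes, i.e. retrained values in an identically shaped checkpoint. A quick sketch for checking a downloaded copy against the new pointer:

```python
import hashlib

# Hash the downloaded file in 1 MiB chunks and compare against the new oid
# from the LFS pointer above.
sha = hashlib.sha256()
with open("brad/adapter_model.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

expected = "929f12fefb40be4debb5bf9261020211a3da3cdbd13dcded8275759848392154"
assert sha.hexdigest() == expected, "file does not match the LFS pointer"
```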