arisha07 committed on
Commit 863fa44
1 Parent(s): 60b2c8f

Upload 19 files

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+ INT8/unet_controlnet_int8_NPU.blob filter=lfs diff=lfs merge=lfs -text
FP16/controlnet-depth.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b816cccaa3a7a4bf06635a16e9c3c433defde7ee24cb48c37dcdc5c345a12d85
+ size 722537148
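Each `.bin` and `.blob` added in this commit is stored as a Git LFS pointer rather than raw weights; the pointer records only the spec version, the SHA-256 object id, and the byte size. As a minimal sketch (assuming the repo is cloned without LFS smudging, e.g. with `GIT_LFS_SKIP_SMUDGE=1`, so the on-disk file is just this pointer text; the path below is only an example):

```python
# Sketch: parse a Git LFS pointer file like the ones listed in this commit.
def read_lfs_pointer(path: str) -> dict:
    fields = {}
    with open(path, "r", encoding="utf-8") as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            if key and value:
                fields[key] = value
    return fields  # e.g. {"version": ..., "oid": "sha256:...", "size": "722537148"}

print(read_lfs_pointer("FP16/controlnet-depth.bin"))
```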
FP16/controlnet-depth.xml ADDED
The diff for this file is too large to render. See raw diff
 
FP16/text_encoder.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2806008ae43e38b26bd53188afe3b2f754a734e698468ebcd500c11c4f601be1
+ size 246121804
FP16/text_encoder.xml ADDED
The diff for this file is too large to render. See raw diff
 
FP16/unet_controlnet.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:275056a33a519a737e940bcd045f784b516fdbe7baeef986701a62efacc44b0a
+ size 1719042612
FP16/unet_controlnet.xml ADDED
The diff for this file is too large to render. See raw diff
 
FP16/vae_decoder.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:76cc46b58fde57f076ca27414210e3d43380a051e612a7e863b4691d9ca3030c
+ size 98980680
FP16/vae_decoder.xml ADDED
The diff for this file is too large to render. See raw diff
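The FP16 folder now holds the full pipeline (controlnet-depth, text_encoder, unet_controlnet, vae_decoder) as OpenVINO IR pairs: an `.xml` topology file plus its matching `.bin` weights. A rough sketch of loading one such pair with the OpenVINO Python API (paths and device name are illustrative, not part of this commit):

```python
# Sketch: load an FP16 IR pair from this repo and compile it for a device.
import openvino as ov

core = ov.Core()
# read_model picks up the matching .bin next to the .xml automatically.
model = core.read_model("FP16/unet_controlnet.xml")
compiled = core.compile_model(model, device_name="CPU")  # "GPU" etc. also possible
print(compiled.inputs, compiled.outputs)
```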
 
INT8/controlnet-depth.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b816cccaa3a7a4bf06635a16e9c3c433defde7ee24cb48c37dcdc5c345a12d85
+ size 722537148
INT8/controlnet-depth.xml ADDED
The diff for this file is too large to render. See raw diff
 
INT8/text_encoder.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2806008ae43e38b26bd53188afe3b2f754a734e698468ebcd500c11c4f601be1
+ size 246121804
INT8/text_encoder.xml ADDED
The diff for this file is too large to render. See raw diff
 
INT8/unet_controlnet_int8.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e76d6426134be952140fed5847fe45f3793ad88fb3df90bbf471b324c3de57cc
+ size 862161348
INT8/unet_controlnet_int8.xml ADDED
The diff for this file is too large to render. See raw diff
 
INT8/unet_controlnet_int8_NPU.blob ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:289973ffa71fd289a9ba150afb48fe6770ff1d7d92d218afd4603f203fcc37a1
+ size 928567160
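Unlike the IR pairs, `INT8/unet_controlnet_int8_NPU.blob` is a precompiled blob, which is why it needed the extra `.gitattributes` LFS rule above. A hedged sketch of importing such a blob with OpenVINO instead of recompiling the IR (this assumes an NPU device and plugin are available; nothing in this commit documents the exact export settings, so treat it as an illustration only):

```python
# Sketch: import a precompiled OpenVINO blob; assumes an NPU-capable setup.
import openvino as ov

core = ov.Core()
with open("INT8/unet_controlnet_int8_NPU.blob", "rb") as f:
    compiled = core.import_model(f.read(), "NPU")
print(compiled.inputs, compiled.outputs)
```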
INT8/unet_time_proj_sym.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:585fe71d7f1e1e2871d45528a95012ab00b495d7547d834f8043cb37f8b439da
+ size 420568
INT8/unet_time_proj_sym.xml ADDED
@@ -0,0 +1,525 @@
+ <?xml version="1.0"?>
+ <net name="Model2" version="11">
+ <layers>
+ <layer id="0" name="timestep" type="Parameter" version="opset1">
+ <data shape="" element_type="i64" />
+ <rt_info>
+ <attribute name="old_api_map_element_type" version="0" value="i32" />
+ </rt_info>
+ <output>
+ <port id="0" precision="I64" names="timestep" />
+ </output>
+ </layer>
+ <layer id="1" name="/Constant384272028" type="Const" version="opset1">
+ <data element_type="i64" shape="1" offset="0" size="8" />
+ <output>
+ <port id="0" precision="I64" names="/Constant_output_0">
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="2" name="/Unsqueeze" type="Unsqueeze" version="opset1">
+ <input>
+ <port id="0" precision="I64" />
+ <port id="1" precision="I64">
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="I64" names="/Cast_output_0,/Unsqueeze_output_0">
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="3" name="/Where3844" type="Const" version="opset1">
+ <data element_type="i64" shape="1" offset="8" size="8" />
+ <output>
+ <port id="0" precision="I64" names="/Where_output_0">
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="4" name="/Expand" type="Broadcast" version="opset3">
+ <data mode="bidirectional" />
+ <input>
+ <port id="0" precision="I64">
+ <dim>1</dim>
+ </port>
+ <port id="1" precision="I64">
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="I64" names="/Expand_output_0">
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="5" name="/time_proj/Constant384670198" type="Const" version="opset1">
+ <data element_type="i64" shape="1" offset="8" size="8" />
+ <output>
+ <port id="0" precision="I64" names="/time_proj/Constant_output_0">
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="6" name="/time_proj/Unsqueeze" type="Unsqueeze" version="opset1">
+ <input>
+ <port id="0" precision="I64">
+ <dim>1</dim>
+ </port>
+ <port id="1" precision="I64">
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="I64" names="/time_proj/Unsqueeze_output_0">
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="7" name="/time_proj/Cast" type="Convert" version="opset1">
+ <data destination_type="f32" />
+ <input>
+ <port id="0" precision="I64">
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="/time_proj/Cast_output_0">
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="8" name="/time_proj/Constant_1384972730" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 160" offset="16" size="640" />
+ <output>
+ <port id="0" precision="FP32" names="/time_proj/Constant_1_output_0">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="9" name="/time_proj/Mul" type="Multiply" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="/time_proj/Mul_output_0">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="10" name="/time_proj/Sin" type="Sin" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="/time_proj/Sin_output_0">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="11" name="250762508072964" type="Const" version="opset1">
+ <data element_type="f32" shape="" offset="656" size="4" />
+ <output>
+ <port id="0" precision="FP32" />
+ </output>
+ </layer>
+ <layer id="12" name="250772508175535" type="Const" version="opset1">
+ <data element_type="f32" shape="" offset="660" size="4" />
+ <output>
+ <port id="0" precision="FP32" />
+ </output>
+ </layer>
+ <layer id="13" name="250782508270750" type="Const" version="opset1">
+ <data element_type="f32" shape="" offset="656" size="4" />
+ <output>
+ <port id="0" precision="FP32" />
+ </output>
+ </layer>
+ <layer id="14" name="250792508378268" type="Const" version="opset1">
+ <data element_type="f32" shape="" offset="660" size="4" />
+ <output>
+ <port id="0" precision="FP32" />
+ </output>
+ </layer>
+ <layer id="15" name="/time_proj/Concat/fq_input_0" type="FakeQuantize" version="opset1">
+ <data levels="256" auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ <port id="1" precision="FP32" />
+ <port id="2" precision="FP32" />
+ <port id="3" precision="FP32" />
+ <port id="4" precision="FP32" />
+ </input>
+ <output>
+ <port id="5" precision="FP32">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="16" name="/time_proj/Cos" type="Cos" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="/time_proj/Cos_output_0">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="17" name="250862509070744" type="Const" version="opset1">
+ <data element_type="f32" shape="" offset="656" size="4" />
+ <output>
+ <port id="0" precision="FP32" />
+ </output>
+ </layer>
+ <layer id="18" name="250872509175832" type="Const" version="opset1">
+ <data element_type="f32" shape="" offset="660" size="4" />
+ <output>
+ <port id="0" precision="FP32" />
+ </output>
+ </layer>
+ <layer id="19" name="250882509271665" type="Const" version="opset1">
+ <data element_type="f32" shape="" offset="656" size="4" />
+ <output>
+ <port id="0" precision="FP32" />
+ </output>
+ </layer>
+ <layer id="20" name="250892509371395" type="Const" version="opset1">
+ <data element_type="f32" shape="" offset="660" size="4" />
+ <output>
+ <port id="0" precision="FP32" />
+ </output>
+ </layer>
+ <layer id="21" name="/time_proj/Concat/fq_input_1" type="FakeQuantize" version="opset1">
+ <data levels="256" auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ <port id="1" precision="FP32" />
+ <port id="2" precision="FP32" />
+ <port id="3" precision="FP32" />
+ <port id="4" precision="FP32" />
+ </input>
+ <output>
+ <port id="5" precision="FP32">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="22" name="/time_proj/Concat" type="Concat" version="opset1">
+ <data axis="1" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="/time_proj/Concat_output_0">
+ <dim>1</dim>
+ <dim>320</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="23" name="Constant_182093854" type="Const" version="opset1">
+ <data element_type="i64" shape="2" offset="664" size="16" />
+ <output>
+ <port id="0" precision="I64">
+ <dim>2</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="24" name="Constant_182123855" type="Const" version="opset1">
+ <data element_type="i64" shape="2" offset="680" size="16" />
+ <output>
+ <port id="0" precision="I64">
+ <dim>2</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="25" name="Constant_182153856" type="Const" version="opset1">
+ <data element_type="i64" shape="2" offset="696" size="16" />
+ <output>
+ <port id="0" precision="I64">
+ <dim>2</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="26" name="/time_proj/Slice" type="StridedSlice" version="opset1">
+ <data begin_mask="1, 0" end_mask="1, 0" new_axis_mask="" shrink_axis_mask="" ellipsis_mask="" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>320</dim>
+ </port>
+ <port id="1" precision="I64">
+ <dim>2</dim>
+ </port>
+ <port id="2" precision="I64">
+ <dim>2</dim>
+ </port>
+ <port id="3" precision="I64">
+ <dim>2</dim>
+ </port>
+ </input>
+ <output>
+ <port id="4" precision="FP32" names="/time_proj/Slice_output_0">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="27" name="Constant_182213858" type="Const" version="opset1">
+ <data element_type="i64" shape="2" offset="712" size="16" />
+ <output>
+ <port id="0" precision="I64">
+ <dim>2</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="28" name="Constant_182243859" type="Const" version="opset1">
+ <data element_type="i64" shape="2" offset="664" size="16" />
+ <output>
+ <port id="0" precision="I64">
+ <dim>2</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="29" name="Constant_182273860" type="Const" version="opset1">
+ <data element_type="i64" shape="2" offset="696" size="16" />
+ <output>
+ <port id="0" precision="I64">
+ <dim>2</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="30" name="/time_proj/Slice_1" type="StridedSlice" version="opset1">
+ <data begin_mask="1, 0" end_mask="1, 0" new_axis_mask="" shrink_axis_mask="" ellipsis_mask="" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>320</dim>
+ </port>
+ <port id="1" precision="I64">
+ <dim>2</dim>
+ </port>
+ <port id="2" precision="I64">
+ <dim>2</dim>
+ </port>
+ <port id="3" precision="I64">
+ <dim>2</dim>
+ </port>
+ </input>
+ <output>
+ <port id="4" precision="FP32" names="/time_proj/Slice_1_output_0">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="31" name="/time_proj/Concat_1" type="Concat" version="opset1">
+ <data axis="1" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="/Cast_1_output_0,/time_proj/Concat_1_output_0">
+ <dim>1</dim>
+ <dim>320</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="32" name="time_embedding.linear_1.weight386341283/quantized5001874713" type="Const" version="opset1">
+ <data element_type="i8" shape="1280, 320" offset="728" size="409600" />
+ <output>
+ <port id="0" precision="I8">
+ <dim>1280</dim>
+ <dim>320</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="33" name="time_embedding.linear_1.weight386341283/quantized/to_f32" type="Convert" version="opset1">
+ <data destination_type="f32" />
+ <input>
+ <port id="0" precision="I8">
+ <dim>1280</dim>
+ <dim>320</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32">
+ <dim>1280</dim>
+ <dim>320</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="34" name="/time_embedding/linear_1/Gemm/WithoutBiases/fq_weights_1/scale5002673147" type="Const" version="opset1">
+ <data element_type="f32" shape="1280, 1" offset="410328" size="5120" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>1280</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="35" name="/time_embedding/linear_1/Gemm/WithoutBiases/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1280</dim>
+ <dim>320</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1280</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>1280</dim>
+ <dim>320</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="36" name="/time_embedding/linear_1/Gemm/WithoutBiases" type="MatMul" version="opset1">
+ <data transpose_a="false" transpose_b="true" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>320</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1280</dim>
+ <dim>320</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>1</dim>
+ <dim>1280</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="37" name="Constant_22762386570027" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 1280" offset="415448" size="5120" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>1280</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="38" name="/time_embedding/linear_1/Gemm" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>1280</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>1280</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="/time_embedding/linear_1/Gemm_output_0">
+ <dim>1</dim>
+ <dim>1280</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="39" name="/time_embedding/linear_1/Gemm0" type="Result" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>1280</dim>
+ </port>
+ </input>
+ </layer>
+ </layers>
+ <edges>
+ <edge from-layer="0" from-port="0" to-layer="2" to-port="0" />
+ <edge from-layer="1" from-port="0" to-layer="2" to-port="1" />
+ <edge from-layer="2" from-port="2" to-layer="4" to-port="0" />
+ <edge from-layer="3" from-port="0" to-layer="4" to-port="1" />
+ <edge from-layer="4" from-port="2" to-layer="6" to-port="0" />
+ <edge from-layer="5" from-port="0" to-layer="6" to-port="1" />
+ <edge from-layer="6" from-port="2" to-layer="7" to-port="0" />
+ <edge from-layer="7" from-port="1" to-layer="9" to-port="0" />
+ <edge from-layer="8" from-port="0" to-layer="9" to-port="1" />
+ <edge from-layer="9" from-port="2" to-layer="10" to-port="0" />
+ <edge from-layer="9" from-port="2" to-layer="16" to-port="0" />
+ <edge from-layer="10" from-port="1" to-layer="15" to-port="0" />
+ <edge from-layer="11" from-port="0" to-layer="15" to-port="1" />
+ <edge from-layer="12" from-port="0" to-layer="15" to-port="2" />
+ <edge from-layer="13" from-port="0" to-layer="15" to-port="3" />
+ <edge from-layer="14" from-port="0" to-layer="15" to-port="4" />
+ <edge from-layer="15" from-port="5" to-layer="22" to-port="0" />
+ <edge from-layer="16" from-port="1" to-layer="21" to-port="0" />
+ <edge from-layer="17" from-port="0" to-layer="21" to-port="1" />
+ <edge from-layer="18" from-port="0" to-layer="21" to-port="2" />
+ <edge from-layer="19" from-port="0" to-layer="21" to-port="3" />
+ <edge from-layer="20" from-port="0" to-layer="21" to-port="4" />
+ <edge from-layer="21" from-port="5" to-layer="22" to-port="1" />
+ <edge from-layer="22" from-port="2" to-layer="26" to-port="0" />
+ <edge from-layer="22" from-port="2" to-layer="30" to-port="0" />
+ <edge from-layer="23" from-port="0" to-layer="26" to-port="1" />
+ <edge from-layer="24" from-port="0" to-layer="26" to-port="2" />
+ <edge from-layer="25" from-port="0" to-layer="26" to-port="3" />
+ <edge from-layer="26" from-port="4" to-layer="31" to-port="0" />
+ <edge from-layer="27" from-port="0" to-layer="30" to-port="1" />
+ <edge from-layer="28" from-port="0" to-layer="30" to-port="2" />
+ <edge from-layer="29" from-port="0" to-layer="30" to-port="3" />
+ <edge from-layer="30" from-port="4" to-layer="31" to-port="1" />
+ <edge from-layer="31" from-port="2" to-layer="36" to-port="0" />
+ <edge from-layer="32" from-port="0" to-layer="33" to-port="0" />
+ <edge from-layer="33" from-port="1" to-layer="35" to-port="0" />
+ <edge from-layer="34" from-port="0" to-layer="35" to-port="1" />
+ <edge from-layer="35" from-port="2" to-layer="36" to-port="1" />
+ <edge from-layer="36" from-port="2" to-layer="38" to-port="0" />
+ <edge from-layer="37" from-port="0" to-layer="38" to-port="1" />
+ <edge from-layer="38" from-port="2" to-layer="39" to-port="0" />
+ </edges>
+ <rt_info />
+ </net>
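For readers skimming the IR above: the graph is only the UNet's sinusoidal time projection plus the first time-embedding linear layer, with FakeQuantize nodes inserted for the symmetric INT8 variant. Ignoring quantization, a rough NumPy sketch of the equivalent computation follows; the 160/320/1280 dimensions come from the port shapes in the XML, while the frequency constants and linear_1 weights are placeholders here because their real values live in `unet_time_proj_sym.bin`.

```python
# Rough NumPy equivalent of unet_time_proj_sym (FakeQuantize steps omitted).
import numpy as np

def time_proj(timestep: int, freqs, weight, bias):
    t = np.array([[float(timestep)]], dtype=np.float32)        # /Unsqueeze + /time_proj/Cast
    angles = t * freqs                                          # /time_proj/Mul -> (1, 160)
    emb = np.concatenate([np.sin(angles), np.cos(angles)], 1)   # /time_proj/Concat -> (1, 320)
    # /time_proj/Slice, /time_proj/Slice_1 and Concat_1 reorder the two halves;
    # the exact order depends on the slice constants stored in the .bin.
    emb = np.concatenate([emb[:, 160:], emb[:, :160]], 1)
    return emb @ weight.T + bias                                # linear_1 MatMul + Add -> (1, 1280)

out = time_proj(981,
                np.ones((1, 160), np.float32),      # placeholder for the 1x160 frequency constant
                np.zeros((1280, 320), np.float32),  # placeholder for linear_1 weights
                np.zeros((1, 1280), np.float32))    # placeholder for linear_1 bias
print(out.shape)  # (1, 1280)
```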
INT8/vae_decoder.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:76cc46b58fde57f076ca27414210e3d43380a051e612a7e863b4691d9ca3030c
+ size 98980680
INT8/vae_decoder.xml ADDED
The diff for this file is too large to render. See raw diff