Scheduler changed to EulerAncestralDiscreteScheduler

- model_index.json +1 -1
- scheduler/scheduler_config.json +1 -1
- text_encoder/openvino_model.xml +73 -73
- unet/openvino_model.xml +176 -176
- vae_decoder/openvino_model.xml +6 -6
- vae_encoder/openvino_model.xml +12 -12
model_index.json
CHANGED
@@ -13,7 +13,7 @@
   ],
   "scheduler": [
     "diffusers",
-    "
+    "EulerAncestralDiscreteScheduler"
   ],
   "text_encoder": [
     "transformers",
scheduler/scheduler_config.json
CHANGED
@@ -1,5 +1,5 @@
 {
-  "_class_name": "
+  "_class_name": "EulerAncestralDiscreteScheduler",
   "_diffusers_version": "0.23.0",
   "beta_end": 0.012,
   "beta_schedule": "scaled_linear",
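The two JSON changes above are what diffusers-style loaders read to decide which scheduler class to instantiate for this pipeline. The snippet below is a minimal usage sketch, assuming this repository is an OpenVINO export loaded with optimum-intel and that diffusers is installed; the repository id is a placeholder, and the explicit `from_config` swap at the end is only needed for local checkouts that predate this commit.

```python
from optimum.intel import OVStableDiffusionPipeline
from diffusers import EulerAncestralDiscreteScheduler

# "OpenVINO/stable-diffusion-ov" is a placeholder for this repo's id.
pipe = OVStableDiffusionPipeline.from_pretrained("OpenVINO/stable-diffusion-ov")

# After this commit, the scheduler declared in model_index.json /
# scheduler_config.json is already Euler Ancestral:
print(type(pipe.scheduler).__name__)  # -> EulerAncestralDiscreteScheduler

# For an older snapshot, the same switch can be made in code without editing
# the JSON files; from_config reuses the existing scheduler settings
# (beta_end, beta_schedule, ...) and only changes the sampling algorithm.
pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)

image = pipe("a watercolor painting of a lighthouse", num_inference_steps=20).images[0]
image.save("lighthouse.png")
```

Note that only the `_class_name` entry changes sampling behaviour; the `beta_end` / `beta_schedule` values in scheduler_config.json are carried over unchanged.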
text_encoder/openvino_model.xml
CHANGED
@@ -27,7 +27,7 @@
-<layer id="3" name="__module.text_model/aten::size/
+<layer id="3" name="__module.text_model/aten::size/ShapeOf" type="ShapeOf" version="opset3">
@@ -712,7 +712,7 @@
-<layer id="57" name="
+<layer id="57" name="Constant_44069" type="Const" version="opset1">
@@ -720,7 +720,7 @@
-<layer id="58" name="__module.text_model.encoder.layers.0.self_attn/prim::
+<layer id="58" name="__module.text_model.encoder.layers.0.self_attn/prim::ListConstruct_140/Reshape" type="Reshape" version="opset1">
@@ -1158,7 +1158,7 @@
-<layer id="89" name="__module.text_model/aten::
+<layer id="89" name="__module.text_model/aten::size/ShapeOf_10" type="ShapeOf" version="opset3">
@@ -2570,7 +2570,7 @@
-<layer id="185" name="
+<layer id="185" name="Constant_44090" type="Const" version="opset1">
@@ -2578,7 +2578,7 @@
-<layer id="186" name="__module.text_model.encoder.layers.1.self_attn/prim::
+<layer id="186" name="__module.text_model.encoder.layers.1.self_attn/prim::ListConstruct_272/Reshape" type="Reshape" version="opset1">
@@ -4010,7 +4010,7 @@
-<layer id="280" name="__module.text_model.encoder.layers.2.self_attn/aten::size/
+<layer id="280" name="__module.text_model.encoder.layers.2.self_attn/aten::size/ShapeOf_312" type="ShapeOf" version="opset3">
@@ -4044,7 +4044,7 @@
-<layer id="283" name="
+<layer id="283" name="Constant_44104" type="Const" version="opset1">
@@ -4052,7 +4052,7 @@
-<layer id="284" name="__module.text_model.encoder.layers.2.self_attn/prim::
+<layer id="284" name="__module.text_model.encoder.layers.2.self_attn/prim::ListConstruct_355/Reshape" type="Reshape" version="opset1">
@@ -5484,7 +5484,7 @@
-<layer id="378" name="__module.text_model.encoder.layers.3.self_attn/aten::size/
+<layer id="378" name="__module.text_model.encoder.layers.3.self_attn/aten::size/ShapeOf_451" type="ShapeOf" version="opset3">
@@ -5518,7 +5518,7 @@
-<layer id="381" name="
+<layer id="381" name="Constant_44151" type="Const" version="opset1">
@@ -5526,7 +5526,7 @@
-<layer id="382" name="__module.text_model.encoder.layers.3.self_attn/prim::
+<layer id="382" name="__module.text_model.encoder.layers.3.self_attn/prim::ListConstruct_577/Reshape" type="Reshape" version="opset1">
@@ -6958,7 +6958,7 @@
-<layer id="476" name="__module.text_model.encoder.layers.4.self_attn/aten::size/
+<layer id="476" name="__module.text_model.encoder.layers.4.self_attn/aten::size/ShapeOf" type="ShapeOf" version="opset3">
@@ -6992,7 +6992,7 @@
-<layer id="479" name="
+<layer id="479" name="Constant_44176" type="Const" version="opset1">
@@ -7000,7 +7000,7 @@
-<layer id="480" name="__module.text_model.encoder.layers.4.self_attn/prim::
+<layer id="480" name="__module.text_model.encoder.layers.4.self_attn/prim::ListConstruct_713/Reshape" type="Reshape" version="opset1">
@@ -7143,7 +7143,7 @@
-<layer id="491" name="
+<layer id="491" name="Constant_44169" type="Const" version="opset1">
@@ -7151,7 +7151,7 @@
-<layer id="492" name="__module.text_model.encoder.layers.4.self_attn/prim::
+<layer id="492" name="__module.text_model.encoder.layers.4.self_attn/prim::ListConstruct_684/Reshape" type="Reshape" version="opset1">
@@ -8432,7 +8432,7 @@
-<layer id="574" name="__module.text_model.encoder.layers.5.self_attn/aten::size/
+<layer id="574" name="__module.text_model.encoder.layers.5.self_attn/aten::size/ShapeOf" type="ShapeOf" version="opset3">
@@ -8466,7 +8466,7 @@
-<layer id="577" name="
+<layer id="577" name="Constant_44197" type="Const" version="opset1">
@@ -8474,7 +8474,7 @@
-<layer id="578" name="__module.text_model.encoder.layers.5.self_attn/prim::
+<layer id="578" name="__module.text_model.encoder.layers.5.self_attn/prim::ListConstruct_825/Reshape" type="Reshape" version="opset1">
@@ -9906,7 +9906,7 @@
-<layer id="672" name="__module.text_model.encoder.layers.6.self_attn/aten::size/
+<layer id="672" name="__module.text_model.encoder.layers.6.self_attn/aten::size/ShapeOf" type="ShapeOf" version="opset3">
@@ -9940,7 +9940,7 @@
-<layer id="675" name="
+<layer id="675" name="Constant_44215" type="Const" version="opset1">
@@ -9948,7 +9948,7 @@
-<layer id="676" name="__module.text_model.encoder.layers.6.self_attn/prim::
+<layer id="676" name="__module.text_model.encoder.layers.6.self_attn/prim::ListConstruct_952/Reshape" type="Reshape" version="opset1">
@@ -10091,7 +10091,7 @@
-<layer id="687" name="
+<layer id="687" name="Constant_44219" type="Const" version="opset1">
@@ -10099,7 +10099,7 @@
-<layer id="688" name="__module.text_model.encoder.layers.6.self_attn/prim::
+<layer id="688" name="__module.text_model.encoder.layers.6.self_attn/prim::ListConstruct_956/Reshape" type="Reshape" version="opset1">
@@ -11414,7 +11414,7 @@
-<layer id="773" name="
+<layer id="773" name="Constant_44236" type="Const" version="opset1">
@@ -11422,7 +11422,7 @@
-<layer id="774" name="__module.text_model.encoder.layers.7.self_attn/prim::
+<layer id="774" name="__module.text_model.encoder.layers.7.self_attn/prim::ListConstruct_1000/Reshape" type="Reshape" version="opset1">
@@ -11565,7 +11565,7 @@
-<layer id="785" name="
+<layer id="785" name="Constant_44233" type="Const" version="opset1">
@@ -11573,7 +11573,7 @@
-<layer id="786" name="__module.text_model.encoder.layers.7.self_attn/prim::
+<layer id="786" name="__module.text_model.encoder.layers.7.self_attn/prim::ListConstruct_1059/Reshape" type="Reshape" version="opset1">
@@ -12888,7 +12888,7 @@
-<layer id="871" name="
+<layer id="871" name="Constant_44254" type="Const" version="opset1">
@@ -12896,7 +12896,7 @@
-<layer id="872" name="__module.text_model.encoder.layers.8.self_attn/prim::
+<layer id="872" name="__module.text_model.encoder.layers.8.self_attn/prim::ListConstruct_1171/Reshape" type="Reshape" version="opset1">
@@ -14328,7 +14328,7 @@
-<layer id="966" name="__module.text_model.encoder.layers.9.self_attn/aten::size/
+<layer id="966" name="__module.text_model.encoder.layers.9.self_attn/aten::size/ShapeOf_1264" type="ShapeOf" version="opset3">
@@ -15836,7 +15836,7 @@
-<layer id="1067" name="
+<layer id="1067" name="Constant_44311" type="Const" version="opset1">
@@ -15844,7 +15844,7 @@
-<layer id="1068" name="__module.text_model.encoder.layers.10.self_attn/prim::
+<layer id="1068" name="__module.text_model.encoder.layers.10.self_attn/prim::ListConstruct_1408/Reshape" type="Reshape" version="opset1">
@@ -17276,7 +17276,7 @@
-<layer id="1162" name="__module.text_model.encoder.layers.11.self_attn/aten::size/
+<layer id="1162" name="__module.text_model.encoder.layers.11.self_attn/aten::size/ShapeOf_1536" type="ShapeOf" version="opset3">
@@ -17310,7 +17310,7 @@
-<layer id="1165" name="
+<layer id="1165" name="Constant_44329" type="Const" version="opset1">
@@ -17318,7 +17318,7 @@
-<layer id="1166" name="__module.text_model.encoder.layers.11.self_attn/prim::
+<layer id="1166" name="__module.text_model.encoder.layers.11.self_attn/prim::ListConstruct_1579/Reshape" type="Reshape" version="opset1">
@@ -17461,7 +17461,7 @@
-<layer id="1177" name="
+<layer id="1177" name="Constant_44333" type="Const" version="opset1">
@@ -17469,7 +17469,7 @@
-<layer id="1178" name="__module.text_model.encoder.layers.11.self_attn/prim::
+<layer id="1178" name="__module.text_model.encoder.layers.11.self_attn/prim::ListConstruct_1603/Reshape" type="Reshape" version="opset1">
@@ -18750,7 +18750,7 @@
-<layer id="1260" name="__module.text_model.encoder.layers.12.self_attn/aten::size/
+<layer id="1260" name="__module.text_model.encoder.layers.12.self_attn/aten::size/ShapeOf_1672" type="ShapeOf" version="opset3">
@@ -18784,7 +18784,7 @@
-<layer id="1263" name="
+<layer id="1263" name="Constant_44354" type="Const" version="opset1">
@@ -18792,7 +18792,7 @@
-<layer id="1264" name="__module.text_model.encoder.layers.12.self_attn/prim::
+<layer id="1264" name="__module.text_model.encoder.layers.12.self_attn/prim::ListConstruct_1715/Reshape" type="Reshape" version="opset1">
@@ -20224,7 +20224,7 @@
-<layer id="1358" name="__module.text_model.encoder.layers.13.self_attn/aten::size/
+<layer id="1358" name="__module.text_model.encoder.layers.13.self_attn/aten::size/ShapeOf_1811" type="ShapeOf" version="opset3">
@@ -21698,7 +21698,7 @@
-<layer id="1456" name="__module.text_model.encoder.layers.14.self_attn/aten::size/
+<layer id="1456" name="__module.text_model.encoder.layers.14.self_attn/aten::size/ShapeOf_1947" type="ShapeOf" version="opset3">
@@ -21732,7 +21732,7 @@
-<layer id="1459" name="
+<layer id="1459" name="Constant_44404" type="Const" version="opset1">
@@ -21740,7 +21740,7 @@
-<layer id="1460" name="__module.text_model.encoder.layers.14.self_attn/prim::
+<layer id="1460" name="__module.text_model.encoder.layers.14.self_attn/prim::ListConstruct_1987/Reshape" type="Reshape" version="opset1">
@@ -23172,7 +23172,7 @@
-<layer id="1554" name="__module.text_model.encoder.layers.15.self_attn/aten::size/
+<layer id="1554" name="__module.text_model.encoder.layers.15.self_attn/aten::size/ShapeOf_2080" type="ShapeOf" version="opset3">
@@ -23206,7 +23206,7 @@
-<layer id="1557" name="
+<layer id="1557" name="Constant_44447" type="Const" version="opset1">
@@ -23214,7 +23214,7 @@
-<layer id="1558" name="__module.text_model.encoder.layers.15.self_attn/prim::
+<layer id="1558" name="__module.text_model.encoder.layers.15.self_attn/prim::ListConstruct_2185/Reshape" type="Reshape" version="opset1">
@@ -23357,7 +23357,7 @@
-<layer id="1569" name="
+<layer id="1569" name="Constant_44444" type="Const" version="opset1">
@@ -23365,7 +23365,7 @@
-<layer id="1570" name="__module.text_model.encoder.layers.15.self_attn/prim::
+<layer id="1570" name="__module.text_model.encoder.layers.15.self_attn/prim::ListConstruct_2180/Reshape" type="Reshape" version="opset1">
@@ -24680,7 +24680,7 @@
-<layer id="1655" name="
+<layer id="1655" name="Constant_44472" type="Const" version="opset1">
@@ -24688,7 +24688,7 @@
-<layer id="1656" name="__module.text_model.encoder.layers.16.self_attn/prim::
+<layer id="1656" name="__module.text_model.encoder.layers.16.self_attn/prim::ListConstruct_2321/Reshape" type="Reshape" version="opset1">
@@ -26154,7 +26154,7 @@
-<layer id="1753" name="
+<layer id="1753" name="Constant_44490" type="Const" version="opset1">
@@ -26162,7 +26162,7 @@
-<layer id="1754" name="__module.text_model.encoder.layers.17.self_attn/prim::
+<layer id="1754" name="__module.text_model.encoder.layers.17.self_attn/prim::ListConstruct_2448/Reshape" type="Reshape" version="opset1">
@@ -29068,7 +29068,7 @@
-<layer id="1946" name="__module.text_model.encoder.layers.19.self_attn/aten::size/
+<layer id="1946" name="__module.text_model.encoder.layers.19.self_attn/aten::size/ShapeOf_2624" type="ShapeOf" version="opset3">
@@ -29102,7 +29102,7 @@
-<layer id="1949" name="
+<layer id="1949" name="Constant_44529" type="Const" version="opset1">
@@ -29110,7 +29110,7 @@
-<layer id="1950" name="__module.text_model.encoder.layers.19.self_attn/prim::
+<layer id="1950" name="__module.text_model.encoder.layers.19.self_attn/prim::ListConstruct_2667/Reshape" type="Reshape" version="opset1">
@@ -29253,7 +29253,7 @@
-<layer id="1961" name="
+<layer id="1961" name="Constant_44533" type="Const" version="opset1">
@@ -29261,7 +29261,7 @@
-<layer id="1962" name="__module.text_model.encoder.layers.19.self_attn/prim::
+<layer id="1962" name="__module.text_model.encoder.layers.19.self_attn/prim::ListConstruct_2691/Reshape" type="Reshape" version="opset1">
@@ -30542,7 +30542,7 @@
-<layer id="2044" name="__module.text_model.encoder.layers.20.self_attn/aten::size/
+<layer id="2044" name="__module.text_model.encoder.layers.20.self_attn/aten::size/ShapeOf_2760" type="ShapeOf" version="opset3">
@@ -30576,7 +30576,7 @@
-<layer id="2047" name="
+<layer id="2047" name="Constant_44561" type="Const" version="opset1">
@@ -30584,7 +30584,7 @@
-<layer id="2048" name="__module.text_model.encoder.layers.20.self_attn/prim::
+<layer id="2048" name="__module.text_model.encoder.layers.20.self_attn/prim::ListConstruct_2768/Reshape" type="Reshape" version="opset1">
@@ -30727,7 +30727,7 @@
-<layer id="2059" name="
+<layer id="2059" name="Constant_44558" type="Const" version="opset1">
@@ -30735,7 +30735,7 @@
-<layer id="2060" name="__module.text_model.encoder.layers.20.self_attn/prim::
+<layer id="2060" name="__module.text_model.encoder.layers.20.self_attn/prim::ListConstruct_2827/Reshape" type="Reshape" version="opset1">
@@ -32016,7 +32016,7 @@
-<layer id="2142" name="__module.text_model.encoder.layers.21.self_attn/aten::size/
+<layer id="2142" name="__module.text_model.encoder.layers.21.self_attn/aten::size/ShapeOf_2896" type="ShapeOf" version="opset3">
@@ -32050,7 +32050,7 @@
-<layer id="2145" name="
+<layer id="2145" name="Constant_44579" type="Const" version="opset1">
@@ -32058,7 +32058,7 @@
-<layer id="2146" name="__module.text_model.encoder.layers.21.self_attn/prim::
+<layer id="2146" name="__module.text_model.encoder.layers.21.self_attn/prim::ListConstruct_2939/Reshape" type="Reshape" version="opset1">
@@ -32201,7 +32201,7 @@
-<layer id="2157" name="
+<layer id="2157" name="Constant_44583" type="Const" version="opset1">
@@ -32209,7 +32209,7 @@
-<layer id="2158" name="__module.text_model.encoder.layers.21.self_attn/prim::
+<layer id="2158" name="__module.text_model.encoder.layers.21.self_attn/prim::ListConstruct_2963/Reshape" type="Reshape" version="opset1">
@@ -33490,7 +33490,7 @@
-<layer id="2240" name="__module.text_model.encoder.layers.22.self_attn/aten::size/
+<layer id="2240" name="__module.text_model.encoder.layers.22.self_attn/aten::size/ShapeOf_3035" type="ShapeOf" version="opset3">
@@ -33524,7 +33524,7 @@
-<layer id="2243" name="
+<layer id="2243" name="Constant_44604" type="Const" version="opset1">
@@ -33532,7 +33532,7 @@
-<layer id="2244" name="__module.text_model.encoder.layers.22.self_attn/prim::
+<layer id="2244" name="__module.text_model.encoder.layers.22.self_attn/prim::ListConstruct_3075/Reshape" type="Reshape" version="opset1">
unet/openvino_model.xml
CHANGED
[Only the removed side of this file's hunks is present in this extract; the replacement lines and the remaining hunks are cut off.]
@@ -1684,7 +1684,7 @@
-<layer id="135" name="__module.down_blocks.0.attentions.0
@@ -2465,7 +2465,7 @@
-<layer id="194" name="__module.down_blocks.0.attentions.0.transformer_blocks.0.attn1/aten::size/
@@ -2541,7 +2541,7 @@
-<layer id="200" name="__module.down_blocks.0.attentions.0.transformer_blocks.0.attn1/aten::size/
@@ -3899,7 +3899,7 @@
-<layer id="300" name="__module.down_blocks.0.attentions.0.transformer_blocks.0.attn2/aten::size/
@@ -4189,7 +4189,7 @@
-<layer id="323" name="__module.down_blocks.0.attentions.0.transformer_blocks.0.attn2/aten::size/
@@ -4265,7 +4265,7 @@
-<layer id="329" name="__module.down_blocks.0.attentions.0.transformer_blocks.0.attn2/aten::size/
@@ -6907,7 +6907,7 @@
-<layer id="509" name="__module.down_blocks.0.attentions.1/aten::
@@ -7370,7 +7370,7 @@
-<layer id="541" name="__module.down_blocks.0.attentions.1.transformer_blocks.0.attn1/aten::size/
@@ -7660,7 +7660,7 @@
-<layer id="564" name="__module.down_blocks.0.attentions.1.transformer_blocks.0.attn1/aten::size/
@@ -7736,7 +7736,7 @@
-<layer id="570" name="__module.down_blocks.0.attentions.1.transformer_blocks.0.attn1/aten::size/
@@ -7868,7 +7868,7 @@
-<layer id="583" name="
@@ -7876,7 +7876,7 @@
-<layer id="584" name="__module.down_blocks.0.attentions.1.transformer_blocks.0.attn1/prim::
@@ -8286,7 +8286,7 @@
-<layer id="610" name="__module.down_blocks.0.attentions.1.transformer_blocks.0.attn1/aten::size/
@@ -8650,7 +8650,7 @@
-<layer id="639" name="
@@ -8658,7 +8658,7 @@
-<layer id="640" name="__module.down_blocks.0.attentions.1.transformer_blocks.0.attn1/prim::
@@ -9094,7 +9094,7 @@
-<layer id="670" name="__module.down_blocks.0.attentions.1.transformer_blocks.0.attn2/aten::size/
@@ -9226,7 +9226,7 @@
-<layer id="683" name="
@@ -9234,7 +9234,7 @@
-<layer id="684" name="__module.down_blocks.0.attentions.1.transformer_blocks.0.attn2/prim::
@@ -9460,7 +9460,7 @@
-<layer id="699" name="__module.down_blocks.0.attentions.1.transformer_blocks.0.attn2/aten::size/
@@ -9967,7 +9967,7 @@
-<layer id="735" name="__module.down_blocks.0.attentions.1.transformer_blocks.0.attn2/aten::size/
@@ -10288,7 +10288,7 @@
-<layer id="760" name="
@@ -10296,7 +10296,7 @@
-<layer id="761" name="__module.down_blocks.0.attentions.1.transformer_blocks.0.attn2/prim::
@@ -12232,7 +12232,7 @@
-<layer id="885" name="__module.down_blocks.1.attentions.0/aten::size/
@@ -12695,7 +12695,7 @@
-<layer id="917" name="__module.down_blocks.1.attentions.0.transformer_blocks.0.attn1/aten::size/
@@ -12991,7 +12991,7 @@
-<layer id="941" name="__module.down_blocks.1.attentions.0.transformer_blocks.0.attn1/aten::size/
@@ -13749,7 +13749,7 @@
-<layer id="1000" name="
@@ -13757,7 +13757,7 @@
-<layer id="1001" name="__module.down_blocks.1.attentions.0.transformer_blocks.0.attn1/prim::
@@ -13981,7 +13981,7 @@
-<layer id="1016" name="
@@ -13989,7 +13989,7 @@
-<layer id="1017" name="__module.down_blocks.1.attentions.0.transformer_blocks.0.attn1/prim::
@@ -14425,7 +14425,7 @@
-<layer id="1047" name="__module.down_blocks.1.attentions.0.transformer_blocks.0.attn2/aten::size/
@@ -14557,7 +14557,7 @@
-<layer id="1060" name="
@@ -14565,7 +14565,7 @@
-<layer id="1061" name="__module.down_blocks.1.attentions.0.transformer_blocks.0.attn2/prim::
@@ -14715,7 +14715,7 @@
-<layer id="1070" name="__module.down_blocks.1.attentions.0.transformer_blocks.0.attn2/aten::size/
@@ -15298,7 +15298,7 @@
-<layer id="1112" name="__module.down_blocks.1.attentions.0.transformer_blocks.0.attn2/aten::size/
@@ -15567,7 +15567,7 @@
-<layer id="1132" name="__module.down_blocks.1.attentions.0.transformer_blocks.0.attn2/aten::size/
@@ -15619,7 +15619,7 @@
-<layer id="1137" name="
@@ -15627,7 +15627,7 @@
-<layer id="1138" name="__module.down_blocks.1.attentions.0.transformer_blocks.0.attn2/prim::
@@ -17882,7 +17882,7 @@
-<layer id="1286" name="__module.down_blocks.1.attentions.1.transformer_blocks.0.attn1/aten::size/
@@ -18380,7 +18380,7 @@
-<layer id="1328" name="
@@ -18388,7 +18388,7 @@
-<layer id="1329" name="__module.down_blocks.1.attentions.1.transformer_blocks.0.attn1/prim::
@@ -18798,7 +18798,7 @@
-<layer id="1355" name="__module.down_blocks.1.attentions.1.transformer_blocks.0.attn1/aten::size/
@@ -19162,7 +19162,7 @@
-<layer id="1384" name="
@@ -19170,7 +19170,7 @@
-<layer id="1385" name="__module.down_blocks.1.attentions.1.transformer_blocks.0.attn1/prim::
@@ -19606,7 +19606,7 @@
-<layer id="1415" name="__module.down_blocks.1.attentions.1.transformer_blocks.0.attn2/aten::size/
@@ -20748,7 +20748,7 @@
-<layer id="1500" name="__module.down_blocks.1.attentions.1.transformer_blocks.0.attn2/aten::size/
@@ -22744,7 +22744,7 @@
-<layer id="1630" name="__module.down_blocks.2.attentions.0
@@ -23579,7 +23579,7 @@
-<layer id="1692" name="__module.down_blocks.2.attentions.0.transformer_blocks.0.attn1/aten::size/
@@ -23711,7 +23711,7 @@
-<layer id="1705" name="
@@ -23719,7 +23719,7 @@
-<layer id="1706" name="__module.down_blocks.2.attentions.0.transformer_blocks.0.attn1/prim::
@@ -24261,7 +24261,7 @@
-<layer id="1745" name="
@@ -24269,7 +24269,7 @@
-<layer id="1746" name="__module.down_blocks.2.attentions.0.transformer_blocks.0.attn1/prim::
@@ -24441,7 +24441,7 @@
-<layer id="1756" name="__module.down_blocks.2.attentions.0.transformer_blocks.0.attn1/aten::size/
@@ -24937,7 +24937,7 @@
-<layer id="1792" name="__module.down_blocks.2.attentions.0.transformer_blocks.0.attn2/aten::size/
@@ -25069,7 +25069,7 @@
-<layer id="1805" name="
@@ -25077,7 +25077,7 @@
-<layer id="1806" name="__module.down_blocks.2.attentions.0.transformer_blocks.0.attn2/prim::
@@ -25303,7 +25303,7 @@
-<layer id="1821" name="__module.down_blocks.2.attentions.0.transformer_blocks.0.attn2/aten::size/
@@ -27931,7 +27931,7 @@
-<layer id="1999" name="__module.down_blocks.2.attentions.1/aten::size/
@@ -29310,7 +29310,7 @@
-<layer id="2100" name="__module.down_blocks.2.attentions.1.transformer_blocks.0.attn1/aten::size/
@@ -29442,7 +29442,7 @@
-<layer id="2113" name="
|
@@ -29450,7 +29450,7 @@
|
|
29450 |
</port>
|
29451 |
</output>
|
29452 |
</layer>
|
29453 |
-
<layer id="2114" name="__module.down_blocks.2.attentions.1.transformer_blocks.0.attn1/prim::
|
29454 |
<data special_zero="false" />
|
29455 |
<input>
|
29456 |
<port id="0" precision="I32" />
|
@@ -30118,7 +30118,7 @@
|
|
30118 |
</port>
|
30119 |
</output>
|
30120 |
</layer>
|
30121 |
-
<layer id="2160" name="__module.down_blocks.2.attentions.1.transformer_blocks.0.attn2/aten::size/
|
30122 |
<data output_type="i32" />
|
30123 |
<input>
|
30124 |
<port id="0" precision="FP32">
|
@@ -30250,7 +30250,7 @@
|
|
30250 |
<port id="1" precision="I32" names="1804,1805" />
|
30251 |
</output>
|
30252 |
</layer>
|
30253 |
-
<layer id="2173" name="
|
30254 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
30255 |
<output>
|
30256 |
<port id="0" precision="I64">
|
@@ -30258,7 +30258,7 @@
|
|
30258 |
</port>
|
30259 |
</output>
|
30260 |
</layer>
|
30261 |
-
<layer id="2174" name="__module.down_blocks.2.attentions.1.transformer_blocks.0.attn2/prim::
|
30262 |
<data special_zero="false" />
|
30263 |
<input>
|
30264 |
<port id="0" precision="I32" />
|
@@ -30408,7 +30408,7 @@
|
|
30408 |
</port>
|
30409 |
</output>
|
30410 |
</layer>
|
30411 |
-
<layer id="2183" name="__module.down_blocks.2.attentions.1.transformer_blocks.0.attn2/aten::size/
|
30412 |
<data output_type="i32" />
|
30413 |
<input>
|
30414 |
<port id="0" precision="FP32">
|
@@ -30484,7 +30484,7 @@
|
|
30484 |
</port>
|
30485 |
</output>
|
30486 |
</layer>
|
30487 |
-
<layer id="2189" name="__module.down_blocks.2.attentions.1.transformer_blocks.0.attn2/aten::size/
|
30488 |
<data output_type="i32" />
|
30489 |
<input>
|
30490 |
<port id="0" precision="FP32">
|
@@ -30991,7 +30991,7 @@
|
|
30991 |
</port>
|
30992 |
</output>
|
30993 |
</layer>
|
30994 |
-
<layer id="2225" name="__module.down_blocks.2.attentions.1.transformer_blocks.0.attn2/aten::size/
|
30995 |
<data output_type="i32" />
|
30996 |
<input>
|
30997 |
<port id="0" precision="FP32">
|
@@ -31260,7 +31260,7 @@
|
|
31260 |
</port>
|
31261 |
</output>
|
31262 |
</layer>
|
31263 |
-
<layer id="2245" name="__module.down_blocks.2.attentions.1.transformer_blocks.0.attn2/aten::size/
|
31264 |
<data output_type="i32" />
|
31265 |
<input>
|
31266 |
<port id="0" precision="FP32">
|
@@ -31312,7 +31312,7 @@
|
|
31312 |
<port id="1" precision="I32" names="1856,1857" />
|
31313 |
</output>
|
31314 |
</layer>
|
31315 |
-
<layer id="2250" name="
|
31316 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
31317 |
<output>
|
31318 |
<port id="0" precision="I64">
|
@@ -31320,7 +31320,7 @@
|
|
31320 |
</port>
|
31321 |
</output>
|
31322 |
</layer>
|
31323 |
-
<layer id="2251" name="__module.down_blocks.2.attentions.1.transformer_blocks.0.attn2/prim::
|
31324 |
<data special_zero="false" />
|
31325 |
<input>
|
31326 |
<port id="0" precision="I32" />
|
@@ -34680,7 +34680,7 @@
|
|
34680 |
</port>
|
34681 |
</output>
|
34682 |
</layer>
|
34683 |
-
<layer id="2463" name="__module.mid_block.attentions.0/aten::size/
|
34684 |
<data output_type="i32" />
|
34685 |
<input>
|
34686 |
<port id="0" precision="FP32">
|
@@ -35143,7 +35143,7 @@
|
|
35143 |
</port>
|
35144 |
</output>
|
35145 |
</layer>
|
35146 |
-
<layer id="2495" name="__module.mid_block.attentions.0.transformer_blocks.0.attn1/aten::size/
|
35147 |
<data output_type="i32" />
|
35148 |
<input>
|
35149 |
<port id="0" precision="FP32">
|
@@ -35641,7 +35641,7 @@
|
|
35641 |
<port id="1" precision="I32" names="1962,1963" />
|
35642 |
</output>
|
35643 |
</layer>
|
35644 |
-
<layer id="2537" name="
|
35645 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
35646 |
<output>
|
35647 |
<port id="0" precision="I64">
|
@@ -35649,7 +35649,7 @@
|
|
35649 |
</port>
|
35650 |
</output>
|
35651 |
</layer>
|
35652 |
-
<layer id="2538" name="__module.mid_block.attentions.0.transformer_blocks.0.attn1/prim::
|
35653 |
<data special_zero="false" />
|
35654 |
<input>
|
35655 |
<port id="0" precision="I32" />
|
@@ -36059,7 +36059,7 @@
|
|
36059 |
</port>
|
36060 |
</output>
|
36061 |
</layer>
|
36062 |
-
<layer id="2564" name="__module.mid_block.attentions.0.transformer_blocks.0.attn1/aten::size/
|
36063 |
<data output_type="i32" />
|
36064 |
<input>
|
36065 |
<port id="0" precision="FP32">
|
@@ -36191,7 +36191,7 @@
|
|
36191 |
<port id="1" precision="I32" names="1976,1977" />
|
36192 |
</output>
|
36193 |
</layer>
|
36194 |
-
<layer id="2577" name="
|
36195 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
36196 |
<output>
|
36197 |
<port id="0" precision="I64">
|
@@ -36199,7 +36199,7 @@
|
|
36199 |
</port>
|
36200 |
</output>
|
36201 |
</layer>
|
36202 |
-
<layer id="2578" name="__module.mid_block.attentions.0.transformer_blocks.0.attn1/prim::
|
36203 |
<data special_zero="false" />
|
36204 |
<input>
|
36205 |
<port id="0" precision="I32" />
|
@@ -36371,7 +36371,7 @@
|
|
36371 |
</port>
|
36372 |
</output>
|
36373 |
</layer>
|
36374 |
-
<layer id="2588" name="__module.mid_block.attentions.0.transformer_blocks.0.attn1/aten::size/
|
36375 |
<data output_type="i32" />
|
36376 |
<input>
|
36377 |
<port id="0" precision="FP32">
|
@@ -36867,7 +36867,7 @@
|
|
36867 |
</port>
|
36868 |
</output>
|
36869 |
</layer>
|
36870 |
-
<layer id="2624" name="__module.mid_block.attentions.0.transformer_blocks.0.attn2/aten::size/
|
36871 |
<data output_type="i32" />
|
36872 |
<input>
|
36873 |
<port id="0" precision="FP32">
|
@@ -36999,7 +36999,7 @@
|
|
36999 |
<port id="1" precision="I32" names="2021,2022" />
|
37000 |
</output>
|
37001 |
</layer>
|
37002 |
-
<layer id="2637" name="
|
37003 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
37004 |
<output>
|
37005 |
<port id="0" precision="I64">
|
@@ -37007,7 +37007,7 @@
|
|
37007 |
</port>
|
37008 |
</output>
|
37009 |
</layer>
|
37010 |
-
<layer id="2638" name="__module.mid_block.attentions.0.transformer_blocks.0.attn2/prim::
|
37011 |
<data special_zero="false" />
|
37012 |
<input>
|
37013 |
<port id="0" precision="I32" />
|
@@ -37740,7 +37740,7 @@
|
|
37740 |
</port>
|
37741 |
</output>
|
37742 |
</layer>
|
37743 |
-
<layer id="2689" name="__module.mid_block.attentions.0.transformer_blocks.0.attn2/aten::size/
|
37744 |
<data output_type="i32" />
|
37745 |
<input>
|
37746 |
<port id="0" precision="FP32">
|
@@ -38009,7 +38009,7 @@
|
|
38009 |
</port>
|
38010 |
</output>
|
38011 |
</layer>
|
38012 |
-
<layer id="2709" name="__module.mid_block.attentions.0.transformer_blocks.0.attn2/aten::size/
|
38013 |
<data output_type="i32" />
|
38014 |
<input>
|
38015 |
<port id="0" precision="FP32">
|
@@ -43354,7 +43354,7 @@
|
|
43354 |
</port>
|
43355 |
</output>
|
43356 |
</layer>
|
43357 |
-
<layer id="3042" name="__module.up_blocks.1.attentions.0
|
43358 |
<data output_type="i32" />
|
43359 |
<input>
|
43360 |
<port id="0" precision="FP32">
|
@@ -44315,7 +44315,7 @@
|
|
44315 |
<port id="1" precision="I32" names="2220,2221" />
|
44316 |
</output>
|
44317 |
</layer>
|
44318 |
-
<layer id="3116" name="
|
44319 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
44320 |
<output>
|
44321 |
<port id="0" precision="I64">
|
@@ -44323,7 +44323,7 @@
|
|
44323 |
</port>
|
44324 |
</output>
|
44325 |
</layer>
|
44326 |
-
<layer id="3117" name="__module.up_blocks.1.attentions.0.transformer_blocks.0.attn1/prim::
|
44327 |
<data special_zero="false" />
|
44328 |
<input>
|
44329 |
<port id="0" precision="I32" />
|
@@ -44865,7 +44865,7 @@
|
|
44865 |
<port id="1" precision="I32" names="2234,2235" />
|
44866 |
</output>
|
44867 |
</layer>
|
44868 |
-
<layer id="3156" name="
|
44869 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
44870 |
<output>
|
44871 |
<port id="0" precision="I64">
|
@@ -44873,7 +44873,7 @@
|
|
44873 |
</port>
|
44874 |
</output>
|
44875 |
</layer>
|
44876 |
-
<layer id="3157" name="__module.up_blocks.1.attentions.0.transformer_blocks.0.attn1/prim::
|
44877 |
<data special_zero="false" />
|
44878 |
<input>
|
44879 |
<port id="0" precision="I32" />
|
@@ -45097,7 +45097,7 @@
|
|
45097 |
<port id="1" precision="I32" names="2258,2259" />
|
45098 |
</output>
|
45099 |
</layer>
|
45100 |
-
<layer id="3172" name="
|
45101 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
45102 |
<output>
|
45103 |
<port id="0" precision="I64">
|
@@ -45105,7 +45105,7 @@
|
|
45105 |
</port>
|
45106 |
</output>
|
45107 |
</layer>
|
45108 |
-
<layer id="3173" name="__module.up_blocks.1.attentions.0.transformer_blocks.0.attn1/prim::
|
45109 |
<data special_zero="false" />
|
45110 |
<input>
|
45111 |
<port id="0" precision="I32" />
|
@@ -45831,7 +45831,7 @@
|
|
45831 |
</port>
|
45832 |
</output>
|
45833 |
</layer>
|
45834 |
-
<layer id="3226" name="__module.up_blocks.1.attentions.0.transformer_blocks.0.attn2/aten::size/
|
45835 |
<data output_type="i32" />
|
45836 |
<input>
|
45837 |
<port id="0" precision="FP32">
|
@@ -46414,7 +46414,7 @@
|
|
46414 |
</port>
|
46415 |
</output>
|
46416 |
</layer>
|
46417 |
-
<layer id="3268" name="__module.up_blocks.1.attentions.0.transformer_blocks.0.attn2/aten::size/
|
46418 |
<data output_type="i32" />
|
46419 |
<input>
|
46420 |
<port id="0" precision="FP32">
|
@@ -49095,7 +49095,7 @@
|
|
49095 |
</port>
|
49096 |
</output>
|
49097 |
</layer>
|
49098 |
-
<layer id="3447" name="__module.up_blocks.1.attentions.1.transformer_blocks.0.attn1/aten::size/
|
49099 |
<data output_type="i32" />
|
49100 |
<input>
|
49101 |
<port id="0" precision="FP32">
|
@@ -49385,7 +49385,7 @@
|
|
49385 |
</port>
|
49386 |
</output>
|
49387 |
</layer>
|
49388 |
-
<layer id="3470" name="__module.up_blocks.1.attentions.1.transformer_blocks.0.attn1/aten::size/
|
49389 |
<data output_type="i32" />
|
49390 |
<input>
|
49391 |
<port id="0" precision="FP32">
|
@@ -50143,7 +50143,7 @@
|
|
50143 |
<port id="1" precision="I32" names="2425,2426" />
|
50144 |
</output>
|
50145 |
</layer>
|
50146 |
-
<layer id="3529" name="
|
50147 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
50148 |
<output>
|
50149 |
<port id="0" precision="I64">
|
@@ -50151,7 +50151,7 @@
|
|
50151 |
</port>
|
50152 |
</output>
|
50153 |
</layer>
|
50154 |
-
<layer id="3530" name="__module.up_blocks.1.attentions.1.transformer_blocks.0.attn1/prim::
|
50155 |
<data special_zero="false" />
|
50156 |
<input>
|
50157 |
<port id="0" precision="I32" />
|
@@ -51692,7 +51692,7 @@
|
|
51692 |
</port>
|
51693 |
</output>
|
51694 |
</layer>
|
51695 |
-
<layer id="3641" name="__module.up_blocks.1.attentions.1.transformer_blocks.0.attn2/aten::size/
|
51696 |
<data output_type="i32" />
|
51697 |
<input>
|
51698 |
<port id="0" precision="FP32">
|
@@ -51961,7 +51961,7 @@
|
|
51961 |
</port>
|
51962 |
</output>
|
51963 |
</layer>
|
51964 |
-
<layer id="3661" name="__module.up_blocks.1.attentions.1.transformer_blocks.0.attn2/aten::size/
|
51965 |
<data output_type="i32" />
|
51966 |
<input>
|
51967 |
<port id="0" precision="FP32">
|
@@ -53910,7 +53910,7 @@
|
|
53910 |
</port>
|
53911 |
</output>
|
53912 |
</layer>
|
53913 |
-
<layer id="3788" name="__module.up_blocks.1.attentions.2
|
53914 |
<data output_type="i32" />
|
53915 |
<input>
|
53916 |
<port id="0" precision="FP32">
|
@@ -54739,7 +54739,7 @@
|
|
54739 |
</port>
|
54740 |
</output>
|
54741 |
</layer>
|
54742 |
-
<layer id="3849" name="__module.up_blocks.1.attentions.2.transformer_blocks.0.attn1/aten::size/
|
54743 |
<data output_type="i32" />
|
54744 |
<input>
|
54745 |
<port id="0" precision="FP32">
|
@@ -54871,7 +54871,7 @@
|
|
54871 |
<port id="1" precision="I32" names="2602,2603" />
|
54872 |
</output>
|
54873 |
</layer>
|
54874 |
-
<layer id="3862" name="
|
54875 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
54876 |
<output>
|
54877 |
<port id="0" precision="I64">
|
@@ -54879,7 +54879,7 @@
|
|
54879 |
</port>
|
54880 |
</output>
|
54881 |
</layer>
|
54882 |
-
<layer id="3863" name="__module.up_blocks.1.attentions.2.transformer_blocks.0.attn1/prim::
|
54883 |
<data special_zero="false" />
|
54884 |
<input>
|
54885 |
<port id="0" precision="I32" />
|
@@ -55289,7 +55289,7 @@
|
|
55289 |
</port>
|
55290 |
</output>
|
55291 |
</layer>
|
55292 |
-
<layer id="3889" name="__module.up_blocks.1.attentions.2.transformer_blocks.0.attn1/aten::size/
|
55293 |
<data output_type="i32" />
|
55294 |
<input>
|
55295 |
<port id="0" precision="FP32">
|
@@ -55421,7 +55421,7 @@
|
|
55421 |
<port id="1" precision="I32" names="2616,2617" />
|
55422 |
</output>
|
55423 |
</layer>
|
55424 |
-
<layer id="3902" name="
|
55425 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
55426 |
<output>
|
55427 |
<port id="0" precision="I64">
|
@@ -55429,7 +55429,7 @@
|
|
55429 |
</port>
|
55430 |
</output>
|
55431 |
</layer>
|
55432 |
-
<layer id="3903" name="__module.up_blocks.1.attentions.2.transformer_blocks.0.attn1/prim::
|
55433 |
<data special_zero="false" />
|
55434 |
<input>
|
55435 |
<port id="0" precision="I32" />
|
@@ -56097,7 +56097,7 @@
|
|
56097 |
</port>
|
56098 |
</output>
|
56099 |
</layer>
|
56100 |
-
<layer id="3949" name="__module.up_blocks.1.attentions.2.transformer_blocks.0.attn2/aten::size/
|
56101 |
<data output_type="i32" />
|
56102 |
<input>
|
56103 |
<port id="0" precision="FP32">
|
@@ -56229,7 +56229,7 @@
|
|
56229 |
<port id="1" precision="I32" names="2661,2662" />
|
56230 |
</output>
|
56231 |
</layer>
|
56232 |
-
<layer id="3962" name="
|
56233 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
56234 |
<output>
|
56235 |
<port id="0" precision="I64">
|
@@ -56237,7 +56237,7 @@
|
|
56237 |
</port>
|
56238 |
</output>
|
56239 |
</layer>
|
56240 |
-
<layer id="3963" name="__module.up_blocks.1.attentions.2.transformer_blocks.0.attn2/prim::
|
56241 |
<data special_zero="false" />
|
56242 |
<input>
|
56243 |
<port id="0" precision="I32" />
|
@@ -56463,7 +56463,7 @@
|
|
56463 |
</port>
|
56464 |
</output>
|
56465 |
</layer>
|
56466 |
-
<layer id="3978" name="__module.up_blocks.1.attentions.2.transformer_blocks.0.attn2/aten::size/
|
56467 |
<data output_type="i32" />
|
56468 |
<input>
|
56469 |
<port id="0" precision="FP32">
|
@@ -57239,7 +57239,7 @@
|
|
57239 |
</port>
|
57240 |
</output>
|
57241 |
</layer>
|
57242 |
-
<layer id="4034" name="__module.up_blocks.1.attentions.2.transformer_blocks.0.attn2/aten::size/
|
57243 |
<data output_type="i32" />
|
57244 |
<input>
|
57245 |
<port id="0" precision="FP32">
|
@@ -57291,7 +57291,7 @@
|
|
57291 |
<port id="1" precision="I32" names="2713,2714" />
|
57292 |
</output>
|
57293 |
</layer>
|
57294 |
-
<layer id="4039" name="
|
57295 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
57296 |
<output>
|
57297 |
<port id="0" precision="I64">
|
@@ -57299,7 +57299,7 @@
|
|
57299 |
</port>
|
57300 |
</output>
|
57301 |
</layer>
|
57302 |
-
<layer id="4040" name="__module.up_blocks.1.attentions.2.transformer_blocks.0.attn2/prim::
|
57303 |
<data special_zero="false" />
|
57304 |
<input>
|
57305 |
<port id="0" precision="I32" />
|
@@ -59301,7 +59301,7 @@
|
|
59301 |
</port>
|
59302 |
</output>
|
59303 |
</layer>
|
59304 |
-
<layer id="4168" name="__module.up_blocks.2.attentions.0/aten::size/
|
59305 |
<data output_type="i32" />
|
59306 |
<input>
|
59307 |
<port id="0" precision="FP32">
|
@@ -59764,7 +59764,7 @@
|
|
59764 |
</port>
|
59765 |
</output>
|
59766 |
</layer>
|
59767 |
-
<layer id="4200" name="__module.up_blocks.2.attentions.0.transformer_blocks.0.attn1/aten::size/
|
59768 |
<data output_type="i32" />
|
59769 |
<input>
|
59770 |
<port id="0" precision="FP32">
|
@@ -60054,7 +60054,7 @@
|
|
60054 |
</port>
|
60055 |
</output>
|
60056 |
</layer>
|
60057 |
-
<layer id="4223" name="__module.up_blocks.2.attentions.0.transformer_blocks.0.attn1/aten::size/
|
60058 |
<data output_type="i32" />
|
60059 |
<input>
|
60060 |
<port id="0" precision="FP32">
|
@@ -60130,7 +60130,7 @@
|
|
60130 |
</port>
|
60131 |
</output>
|
60132 |
</layer>
|
60133 |
-
<layer id="4229" name="__module.up_blocks.2.attentions.0.transformer_blocks.0.attn1/aten::size/
|
60134 |
<data output_type="i32" />
|
60135 |
<input>
|
60136 |
<port id="0" precision="FP32">
|
@@ -60680,7 +60680,7 @@
|
|
60680 |
</port>
|
60681 |
</output>
|
60682 |
</layer>
|
60683 |
-
<layer id="4269" name="__module.up_blocks.2.attentions.0.transformer_blocks.0.attn1/aten::size/
|
60684 |
<data output_type="i32" />
|
60685 |
<input>
|
60686 |
<port id="0" precision="FP32">
|
@@ -60812,7 +60812,7 @@
|
|
60812 |
<port id="1" precision="I32" names="2809,2810" />
|
60813 |
</output>
|
60814 |
</layer>
|
60815 |
-
<layer id="4282" name="
|
60816 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
60817 |
<output>
|
60818 |
<port id="0" precision="I64">
|
@@ -60820,7 +60820,7 @@
|
|
60820 |
</port>
|
60821 |
</output>
|
60822 |
</layer>
|
60823 |
-
<layer id="4283" name="__module.up_blocks.2.attentions.0.transformer_blocks.0.attn1/prim::
|
60824 |
<data special_zero="false" />
|
60825 |
<input>
|
60826 |
<port id="0" precision="I32" />
|
@@ -60992,7 +60992,7 @@
|
|
60992 |
</port>
|
60993 |
</output>
|
60994 |
</layer>
|
60995 |
-
<layer id="4293" name="__module.up_blocks.2.attentions.0.transformer_blocks.0.attn1/aten::size/
|
60996 |
<data output_type="i32" />
|
60997 |
<input>
|
60998 |
<port id="0" precision="FP32">
|
@@ -61488,7 +61488,7 @@
|
|
61488 |
</port>
|
61489 |
</output>
|
61490 |
</layer>
|
61491 |
-
<layer id="4329" name="__module.up_blocks.2.attentions.0.transformer_blocks.0.attn2/aten::size/
|
61492 |
<data output_type="i32" />
|
61493 |
<input>
|
61494 |
<port id="0" precision="FP32">
|
@@ -61854,7 +61854,7 @@
|
|
61854 |
</port>
|
61855 |
</output>
|
61856 |
</layer>
|
61857 |
-
<layer id="4358" name="__module.up_blocks.2.attentions.0.transformer_blocks.0.attn2/aten::size/
|
61858 |
<data output_type="i32" />
|
61859 |
<input>
|
61860 |
<port id="0" precision="FP32">
|
@@ -64579,7 +64579,7 @@
|
|
64579 |
</port>
|
64580 |
</output>
|
64581 |
</layer>
|
64582 |
-
<layer id="4541" name="__module.up_blocks.2.attentions.1/aten::size/
|
64583 |
<data output_type="i32" />
|
64584 |
<input>
|
64585 |
<port id="0" precision="FP32">
|
@@ -65174,7 +65174,7 @@
|
|
65174 |
<port id="1" precision="I32" names="2972,2973" />
|
65175 |
</output>
|
65176 |
</layer>
|
65177 |
-
<layer id="4586" name="
|
65178 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
65179 |
<output>
|
65180 |
<port id="0" precision="I64">
|
@@ -65182,7 +65182,7 @@
|
|
65182 |
</port>
|
65183 |
</output>
|
65184 |
</layer>
|
65185 |
-
<layer id="4587" name="__module.up_blocks.2.attentions.1.transformer_blocks.0.attn1/prim::
|
65186 |
<data special_zero="false" />
|
65187 |
<input>
|
65188 |
<port id="0" precision="I32" />
|
@@ -65332,7 +65332,7 @@
|
|
65332 |
</port>
|
65333 |
</output>
|
65334 |
</layer>
|
65335 |
-
<layer id="4596" name="__module.up_blocks.2.attentions.1.transformer_blocks.0.attn1/aten::size/
|
65336 |
<data output_type="i32" />
|
65337 |
<input>
|
65338 |
<port id="0" precision="FP32">
|
@@ -65408,7 +65408,7 @@
|
|
65408 |
</port>
|
65409 |
</output>
|
65410 |
</layer>
|
65411 |
-
<layer id="4602" name="__module.up_blocks.2.attentions.1.transformer_blocks.0.attn1/aten::size/
|
65412 |
<data output_type="i32" />
|
65413 |
<input>
|
65414 |
<port id="0" precision="FP32">
|
@@ -65958,7 +65958,7 @@
|
|
65958 |
</port>
|
65959 |
</output>
|
65960 |
</layer>
|
65961 |
-
<layer id="4642" name="__module.up_blocks.2.attentions.1.transformer_blocks.0.attn1/aten::size/
|
65962 |
<data output_type="i32" />
|
65963 |
<input>
|
65964 |
<port id="0" precision="FP32">
|
@@ -66090,7 +66090,7 @@
|
|
66090 |
<port id="1" precision="I32" names="3000,3001" />
|
66091 |
</output>
|
66092 |
</layer>
|
66093 |
-
<layer id="4655" name="
|
66094 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
66095 |
<output>
|
66096 |
<port id="0" precision="I64">
|
@@ -66098,7 +66098,7 @@
|
|
66098 |
</port>
|
66099 |
</output>
|
66100 |
</layer>
|
66101 |
-
<layer id="4656" name="__module.up_blocks.2.attentions.1.transformer_blocks.0.attn1/prim::
|
66102 |
<data special_zero="false" />
|
66103 |
<input>
|
66104 |
<port id="0" precision="I32" />
|
@@ -66270,7 +66270,7 @@
|
|
66270 |
</port>
|
66271 |
</output>
|
66272 |
</layer>
|
66273 |
-
<layer id="4666" name="__module.up_blocks.2.attentions.1.transformer_blocks.0.attn1/aten::size/
|
66274 |
<data output_type="i32" />
|
66275 |
<input>
|
66276 |
<port id="0" precision="FP32">
|
@@ -66766,7 +66766,7 @@
|
|
66766 |
</port>
|
66767 |
</output>
|
66768 |
</layer>
|
66769 |
-
<layer id="4702" name="__module.up_blocks.2.attentions.1.transformer_blocks.0.attn2/aten::size/
|
66770 |
<data output_type="i32" />
|
66771 |
<input>
|
66772 |
<port id="0" precision="FP32">
|
@@ -67056,7 +67056,7 @@
|
|
67056 |
</port>
|
67057 |
</output>
|
67058 |
</layer>
|
67059 |
-
<layer id="4725" name="__module.up_blocks.2.attentions.1.transformer_blocks.0.attn2/aten::size/
|
67060 |
<data output_type="i32" />
|
67061 |
<input>
|
67062 |
<port id="0" precision="FP32">
|
@@ -67132,7 +67132,7 @@
|
|
67132 |
</port>
|
67133 |
</output>
|
67134 |
</layer>
|
67135 |
-
<layer id="4731" name="__module.up_blocks.2.attentions.1.transformer_blocks.0.attn2/aten::size/
|
67136 |
<data output_type="i32" />
|
67137 |
<input>
|
67138 |
<port id="0" precision="FP32">
|
@@ -67639,7 +67639,7 @@
|
|
67639 |
</port>
|
67640 |
</output>
|
67641 |
</layer>
|
67642 |
-
<layer id="4767" name="__module.up_blocks.2.attentions.1.transformer_blocks.0.attn2/aten::size/
|
67643 |
<data output_type="i32" />
|
67644 |
<input>
|
67645 |
<port id="0" precision="FP32">
|
@@ -67908,7 +67908,7 @@
|
|
67908 |
</port>
|
67909 |
</output>
|
67910 |
</layer>
|
67911 |
-
<layer id="4787" name="__module.up_blocks.2.attentions.1.transformer_blocks.0.attn2/aten::size/
|
67912 |
<data output_type="i32" />
|
67913 |
<input>
|
67914 |
<port id="0" precision="FP32">
|
@@ -69857,7 +69857,7 @@
|
|
69857 |
</port>
|
69858 |
</output>
|
69859 |
</layer>
|
69860 |
-
<layer id="4914" name="__module.up_blocks.2.attentions.2/aten::size/
|
69861 |
<data output_type="i32" />
|
69862 |
<input>
|
69863 |
<port id="0" precision="FP32">
|
@@ -70320,7 +70320,7 @@
|
|
70320 |
</port>
|
70321 |
</output>
|
70322 |
</layer>
|
70323 |
-
<layer id="4946" name="__module.up_blocks.2.attentions.2.transformer_blocks.0.attn1/aten::size/
|
70324 |
<data output_type="i32" />
|
70325 |
<input>
|
70326 |
<port id="0" precision="FP32">
|
@@ -71236,7 +71236,7 @@
|
|
71236 |
</port>
|
71237 |
</output>
|
71238 |
</layer>
|
71239 |
-
<layer id="5015" name="__module.up_blocks.2.attentions.2.transformer_blocks.0.attn1/aten::size/
|
71240 |
<data output_type="i32" />
|
71241 |
<input>
|
71242 |
<port id="0" precision="FP32">
|
@@ -71600,7 +71600,7 @@
|
|
71600 |
<port id="1" precision="I32" names="3215,3216" />
|
71601 |
</output>
|
71602 |
</layer>
|
71603 |
-
<layer id="5044" name="
|
71604 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
71605 |
<output>
|
71606 |
<port id="0" precision="I64">
|
@@ -71608,7 +71608,7 @@
|
|
71608 |
</port>
|
71609 |
</output>
|
71610 |
</layer>
|
71611 |
-
<layer id="5045" name="__module.up_blocks.2.attentions.2.transformer_blocks.0.attn1/prim::
|
71612 |
<data special_zero="false" />
|
71613 |
<input>
|
71614 |
<port id="0" precision="I32" />
|
@@ -72044,7 +72044,7 @@
|
|
72044 |
</port>
|
72045 |
</output>
|
72046 |
</layer>
|
72047 |
-
<layer id="5075" name="__module.up_blocks.2.attentions.2.transformer_blocks.0.attn2/aten::size/
|
72048 |
<data output_type="i32" />
|
72049 |
<input>
|
72050 |
<port id="0" precision="FP32">
|
@@ -72176,7 +72176,7 @@
|
|
72176 |
<port id="1" precision="I32" names="3236,3237" />
|
72177 |
</output>
|
72178 |
</layer>
|
72179 |
-
<layer id="5088" name="
|
72180 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
72181 |
<output>
|
72182 |
<port id="0" precision="I64">
|
@@ -72184,7 +72184,7 @@
|
|
72184 |
</port>
|
72185 |
</output>
|
72186 |
</layer>
|
72187 |
-
<layer id="5089" name="__module.up_blocks.2.attentions.2.transformer_blocks.0.attn2/prim::
|
72188 |
<data special_zero="false" />
|
72189 |
<input>
|
72190 |
<port id="0" precision="I32" />
|
@@ -73186,7 +73186,7 @@
|
|
73186 |
</port>
|
73187 |
</output>
|
73188 |
</layer>
|
73189 |
-
<layer id="5160" name="__module.up_blocks.2.attentions.2.transformer_blocks.0.attn2/aten::size/
|
73190 |
<data output_type="i32" />
|
73191 |
<input>
|
73192 |
<port id="0" precision="FP32">
|
@@ -75248,7 +75248,7 @@
|
|
75248 |
</port>
|
75249 |
</output>
|
75250 |
</layer>
|
75251 |
-
<layer id="5294" name="__module.up_blocks.3.attentions.0/aten::size/
|
75252 |
<data output_type="i32" />
|
75253 |
<input>
|
75254 |
<port id="0" precision="FP32">
|
@@ -76001,7 +76001,7 @@
|
|
76001 |
</port>
|
76002 |
</output>
|
76003 |
</layer>
|
76004 |
-
<layer id="5349" name="__module.up_blocks.3.attentions.0.transformer_blocks.0.attn1/aten::size/
|
76005 |
<data output_type="i32" />
|
76006 |
<input>
|
76007 |
<port id="0" precision="FP32">
|
@@ -76627,7 +76627,7 @@
|
|
76627 |
</port>
|
76628 |
</output>
|
76629 |
</layer>
|
76630 |
-
<layer id="5395" name="__module.up_blocks.3.attentions.0.transformer_blocks.0.attn1/aten::size/
|
76631 |
<data output_type="i32" />
|
76632 |
<input>
|
76633 |
<port id="0" precision="FP32">
|
@@ -76759,7 +76759,7 @@
|
|
76759 |
<port id="1" precision="I32" names="3384,3385" />
|
76760 |
</output>
|
76761 |
</layer>
|
76762 |
-
<layer id="5408" name="
|
76763 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
76764 |
<output>
|
76765 |
<port id="0" precision="I64">
|
@@ -76767,7 +76767,7 @@
|
|
76767 |
</port>
|
76768 |
</output>
|
76769 |
</layer>
|
76770 |
-
<layer id="5409" name="__module.up_blocks.3.attentions.0.transformer_blocks.0.attn1/prim::
|
76771 |
<data special_zero="false" />
|
76772 |
<input>
|
76773 |
<port id="0" precision="I32" />
|
@@ -76991,7 +76991,7 @@
|
|
76991 |
<port id="1" precision="I32" names="3408,3409" />
|
76992 |
</output>
|
76993 |
</layer>
|
76994 |
-
<layer id="5424" name="
|
76995 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
76996 |
<output>
|
76997 |
<port id="0" precision="I64">
|
@@ -76999,7 +76999,7 @@
|
|
76999 |
</port>
|
77000 |
</output>
|
77001 |
</layer>
|
77002 |
-
<layer id="5425" name="__module.up_blocks.3.attentions.0.transformer_blocks.0.attn1/prim::
|
77003 |
<data special_zero="false" />
|
77004 |
<input>
|
77005 |
<port id="0" precision="I32" />
|
@@ -77567,7 +77567,7 @@
|
|
77567 |
<port id="1" precision="I32" names="3429,3430" />
|
77568 |
</output>
|
77569 |
</layer>
|
77570 |
-
<layer id="5468" name="
|
77571 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
77572 |
<output>
|
77573 |
<port id="0" precision="I64">
|
@@ -77575,7 +77575,7 @@
|
|
77575 |
</port>
|
77576 |
</output>
|
77577 |
</layer>
|
77578 |
-
<layer id="5469" name="__module.up_blocks.3.attentions.0.transformer_blocks.0.attn2/prim::
|
77579 |
<data special_zero="false" />
|
77580 |
<input>
|
77581 |
<port id="0" precision="I32" />
|
@@ -77801,7 +77801,7 @@
|
|
77801 |
</port>
|
77802 |
</output>
|
77803 |
</layer>
|
77804 |
-
<layer id="5484" name="__module.up_blocks.3.attentions.0.transformer_blocks.0.attn2/aten::size/
|
77805 |
<data output_type="i32" />
|
77806 |
<input>
|
77807 |
<port id="0" precision="FP32">
|
@@ -78308,7 +78308,7 @@
|
|
78308 |
</port>
|
78309 |
</output>
|
78310 |
</layer>
|
78311 |
-
<layer id="5520" name="__module.up_blocks.3.attentions.0.transformer_blocks.0.attn2/aten::size/
|
78312 |
<data output_type="i32" />
|
78313 |
<input>
|
78314 |
<port id="0" precision="FP32">
|
@@ -78629,7 +78629,7 @@
|
|
78629 |
<port id="1" precision="I32" names="3481,3482" />
|
78630 |
</output>
|
78631 |
</layer>
|
78632 |
-
<layer id="5545" name="
|
78633 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
78634 |
<output>
|
78635 |
<port id="0" precision="I64">
|
@@ -78637,7 +78637,7 @@
|
|
78637 |
</port>
|
78638 |
</output>
|
78639 |
</layer>
|
78640 |
-
<layer id="5546" name="__module.up_blocks.3.attentions.0.transformer_blocks.0.attn2/prim::
|
78641 |
<data special_zero="false" />
|
78642 |
<input>
|
78643 |
<port id="0" precision="I32" />
|
@@ -80526,7 +80526,7 @@
|
|
80526 |
</port>
|
80527 |
</output>
|
80528 |
</layer>
|
80529 |
-
<layer id="5667" name="__module.up_blocks.3.attentions.1/aten::size/
|
80530 |
<data output_type="i32" />
|
80531 |
<input>
|
80532 |
<port id="0" precision="FP32">
|
@@ -81121,7 +81121,7 @@
|
|
81121 |
<port id="1" precision="I32" names="3547,3548" />
|
81122 |
</output>
|
81123 |
</layer>
|
81124 |
-
<layer id="5712" name="
|
81125 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
81126 |
<output>
|
81127 |
<port id="0" precision="I64">
|
@@ -81129,7 +81129,7 @@
|
|
81129 |
</port>
|
81130 |
</output>
|
81131 |
</layer>
|
81132 |
-
<layer id="5713" name="__module.up_blocks.3.attentions.1.transformer_blocks.0.attn1/prim::
|
81133 |
<data special_zero="false" />
|
81134 |
<input>
|
81135 |
<port id="0" precision="I32" />
|
@@ -81279,7 +81279,7 @@
|
|
81279 |
</port>
|
81280 |
</output>
|
81281 |
</layer>
|
81282 |
-
<layer id="5722" name="__module.up_blocks.3.attentions.1.transformer_blocks.0.attn1/aten::size/
|
81283 |
<data output_type="i32" />
|
81284 |
<input>
|
81285 |
<port id="0" precision="FP32">
|
@@ -81355,7 +81355,7 @@
|
|
81355 |
</port>
|
81356 |
</output>
|
81357 |
</layer>
|
81358 |
-
<layer id="5728" name="__module.up_blocks.3.attentions.1.transformer_blocks.0.attn1/aten::size/
|
81359 |
<data output_type="i32" />
|
81360 |
<input>
|
81361 |
<port id="0" precision="FP32">
|
@@ -81487,7 +81487,7 @@
|
|
81487 |
<port id="1" precision="I32" names="3561,3562" />
|
81488 |
</output>
|
81489 |
</layer>
|
81490 |
-
<layer id="5741" name="
|
81491 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
81492 |
<output>
|
81493 |
<port id="0" precision="I64">
|
@@ -81495,7 +81495,7 @@
|
|
81495 |
</port>
|
81496 |
</output>
|
81497 |
</layer>
|
81498 |
-
<layer id="5742" name="__module.up_blocks.3.attentions.1.transformer_blocks.0.attn1/prim::
|
81499 |
<data special_zero="false" />
|
81500 |
<input>
|
81501 |
<port id="0" precision="I32" />
|
@@ -82217,7 +82217,7 @@
|
|
82217 |
</port>
|
82218 |
</output>
|
82219 |
</layer>
|
82220 |
-
<layer id="5792" name="__module.up_blocks.3.attentions.1.transformer_blocks.0.attn1/aten::size/
|
82221 |
<data output_type="i32" />
|
82222 |
<input>
|
82223 |
<port id="0" precision="FP32">
|
@@ -83079,7 +83079,7 @@
|
|
83079 |
</port>
|
83080 |
</output>
|
83081 |
</layer>
|
83082 |
-
<layer id="5857" name="__module.up_blocks.3.attentions.1.transformer_blocks.0.attn2/aten::size/
|
83083 |
<data output_type="i32" />
|
83084 |
<input>
|
83085 |
<port id="0" precision="FP32">
|
@@ -83586,7 +83586,7 @@
|
|
83586 |
</port>
|
83587 |
</output>
|
83588 |
</layer>
|
83589 |
-
<layer id="5893" name="__module.up_blocks.3.attentions.1.transformer_blocks.0.attn2/aten::size/
|
83590 |
<data output_type="i32" />
|
83591 |
<input>
|
83592 |
<port id="0" precision="FP32">
|
@@ -83855,7 +83855,7 @@
|
|
83855 |
</port>
|
83856 |
</output>
|
83857 |
</layer>
|
83858 |
-
<layer id="5913" name="__module.up_blocks.3.attentions.1.transformer_blocks.0.attn2/aten::size/
|
83859 |
<data output_type="i32" />
|
83860 |
<input>
|
83861 |
<port id="0" precision="FP32">
|
@@ -86399,7 +86399,7 @@
|
|
86399 |
<port id="1" precision="I32" names="3738,3739" />
|
86400 |
</output>
|
86401 |
</layer>
|
86402 |
-
<layer id="6085" name="
|
86403 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
86404 |
<output>
|
86405 |
<port id="0" precision="I64">
|
@@ -86407,7 +86407,7 @@
|
|
86407 |
</port>
|
86408 |
</output>
|
86409 |
</layer>
|
86410 |
-
<layer id="6086" name="__module.up_blocks.3.attentions.2.transformer_blocks.0.attn1/prim::
|
86411 |
<data special_zero="false" />
|
86412 |
<input>
|
86413 |
<port id="0" precision="I32" />
|
@@ -86633,7 +86633,7 @@
|
|
86633 |
</port>
|
86634 |
</output>
|
86635 |
</layer>
|
86636 |
-
<layer id="6101" name="__module.up_blocks.3.attentions.2.transformer_blocks.0.attn1/aten::size/
|
86637 |
<data output_type="i32" />
|
86638 |
<input>
|
86639 |
<port id="0" precision="FP32">
|
@@ -87183,7 +87183,7 @@
|
|
87183 |
</port>
|
87184 |
</output>
|
87185 |
</layer>
|
87186 |
-
<layer id="6141" name="__module.up_blocks.3.attentions.2.transformer_blocks.0.attn1/aten::size/
|
87187 |
<data output_type="i32" />
|
87188 |
<input>
|
87189 |
<port id="0" precision="FP32">
|
@@ -87315,7 +87315,7 @@
|
|
87315 |
<port id="1" precision="I32" names="3766,3767" />
|
87316 |
</output>
|
87317 |
</layer>
|
87318 |
-
<layer id="6154" name="
|
87319 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
87320 |
<output>
|
87321 |
<port id="0" precision="I64">
|
@@ -87323,7 +87323,7 @@
|
|
87323 |
</port>
|
87324 |
</output>
|
87325 |
</layer>
|
87326 |
-
<layer id="6155" name="__module.up_blocks.3.attentions.2.transformer_blocks.0.attn1/prim::
|
87327 |
<data special_zero="false" />
|
87328 |
<input>
|
87329 |
<port id="0" precision="I32" />
|
@@ -87495,7 +87495,7 @@
|
|
87495 |
</port>
|
87496 |
</output>
|
87497 |
</layer>
|
87498 |
-
<layer id="6165" name="__module.up_blocks.3.attentions.2.transformer_blocks.0.attn1/aten::size/
|
87499 |
<data output_type="i32" />
|
87500 |
<input>
|
87501 |
<port id="0" precision="FP32">
|
@@ -89133,7 +89133,7 @@
|
|
89133 |
</port>
|
89134 |
</output>
|
89135 |
</layer>
|
89136 |
-
<layer id="6286" name="__module.up_blocks.3.attentions.2.transformer_blocks.0.attn2/aten::size/
|
89137 |
<data output_type="i32" />
|
89138 |
<input>
|
89139 |
<port id="0" precision="FP32">
|
|
|
	[Added side of the same hunks, cleaned of the viewer's duplicated line numbers and unchanged context lines — one re-exported <layer> declaration per hunk:]
+	<layer id="135" name="__module.down_blocks.0.attentions.0/aten::size/ShapeOf_74" type="ShapeOf" version="opset3">
+	<layer id="194" name="__module.down_blocks.0.attentions.0.transformer_blocks.0.attn1/aten::size/ShapeOf_155" type="ShapeOf" version="opset3">
+	<layer id="200" name="__module.down_blocks.0.attentions.0.transformer_blocks.0.attn1/aten::size/ShapeOf_109" type="ShapeOf" version="opset3">
+	<layer id="300" name="__module.down_blocks.0.attentions.0.transformer_blocks.0.attn2/aten::size/ShapeOf_202" type="ShapeOf" version="opset3">
+	<layer id="323" name="__module.down_blocks.0.attentions.0.transformer_blocks.0.attn2/aten::size/ShapeOf_260" type="ShapeOf" version="opset3">
+	<layer id="329" name="__module.down_blocks.0.attentions.0.transformer_blocks.0.attn2/aten::size/ShapeOf_217" type="ShapeOf" version="opset3">
+	<layer id="509" name="__module.down_blocks.0.attentions.1.norm/aten::group_norm/ShapeOf" type="ShapeOf" version="opset3">
+	<layer id="541" name="__module.down_blocks.0.attentions.1.transformer_blocks.0.attn1/aten::size/ShapeOf_382" type="ShapeOf" version="opset3">
+	<layer id="564" name="__module.down_blocks.0.attentions.1.transformer_blocks.0.attn1/aten::size/ShapeOf_440" type="ShapeOf" version="opset3">
+	<layer id="570" name="__module.down_blocks.0.attentions.1.transformer_blocks.0.attn1/aten::size/ShapeOf_391" type="ShapeOf" version="opset3">
+	<layer id="583" name="Constant_255913" type="Const" version="opset1">
+	<layer id="584" name="__module.down_blocks.0.attentions.1.transformer_blocks.0.attn1/prim::ListConstruct_412/Reshape_1" type="Reshape" version="opset1">
+	<layer id="610" name="__module.down_blocks.0.attentions.1.transformer_blocks.0.attn1/aten::size/ShapeOf_417" type="ShapeOf" version="opset3">
+	<layer id="639" name="Constant_255926" type="Const" version="opset1">
+	<layer id="640" name="__module.down_blocks.0.attentions.1.transformer_blocks.0.attn1/prim::ListConstruct_481/Reshape" type="Reshape" version="opset1">
+	<layer id="670" name="__module.down_blocks.0.attentions.1.transformer_blocks.0.attn2/aten::size/ShapeOf" type="ShapeOf" version="opset3">
+	<layer id="683" name="Constant_255935" type="Const" version="opset1">
+	<layer id="684" name="__module.down_blocks.0.attentions.1.transformer_blocks.0.attn2/prim::ListConstruct_497/Reshape_1" type="Reshape" version="opset1">
+	<layer id="699" name="__module.down_blocks.0.attentions.1.transformer_blocks.0.attn2/aten::size/ShapeOf_499" type="ShapeOf" version="opset3">
+	<layer id="735" name="__module.down_blocks.0.attentions.1.transformer_blocks.0.attn2/aten::size/ShapeOf_525" type="ShapeOf" version="opset3">
+	<layer id="760" name="Constant_255957" type="Const" version="opset1">
+	<layer id="761" name="__module.down_blocks.0.attentions.1.transformer_blocks.0.attn2/prim::ListConstruct_589/Reshape" type="Reshape" version="opset1">
+	<layer id="885" name="__module.down_blocks.1.attentions.0/aten::size/ShapeOf" type="ShapeOf" version="opset3">
+	<layer id="917" name="__module.down_blocks.1.attentions.0.transformer_blocks.0.attn1/aten::size/ShapeOf" type="ShapeOf" version="opset3">
+	<layer id="941" name="__module.down_blocks.1.attentions.0.transformer_blocks.0.attn1/aten::size/ShapeOf_730" type="ShapeOf" version="opset3">
+	<layer id="1000" name="Constant_255987" type="Const" version="opset1">
+	<layer id="1001" name="__module.down_blocks.1.attentions.0.transformer_blocks.0.attn1/prim::ListConstruct_721/Reshape_2" type="Reshape" version="opset1">
+	<layer id="1016" name="Constant_255991" type="Const" version="opset1">
+	<layer id="1017" name="__module.down_blocks.1.attentions.0.transformer_blocks.0.attn1/prim::ListConstruct_767/Reshape" type="Reshape" version="opset1">
+	<layer id="1047" name="__module.down_blocks.1.attentions.0.transformer_blocks.0.attn2/aten::size/ShapeOf_780" type="ShapeOf" version="opset3">
+	<layer id="1060" name="Constant_256004" type="Const" version="opset1">
+	<layer id="1061" name="__module.down_blocks.1.attentions.0.transformer_blocks.0.attn2/prim::ListConstruct_790/Reshape_1" type="Reshape" version="opset1">
+	<layer id="1070" name="__module.down_blocks.1.attentions.0.transformer_blocks.0.attn2/aten::size/ShapeOf_841" type="ShapeOf" version="opset3">
+	<layer id="1112" name="__module.down_blocks.1.attentions.0.transformer_blocks.0.attn2/aten::size/ShapeOf_815" type="ShapeOf" version="opset3">
+	<layer id="1132" name="__module.down_blocks.1.attentions.0.transformer_blocks.0.attn2/aten::size/ShapeOf_861" type="ShapeOf" version="opset3">
+	<layer id="1137" name="Constant_256022" type="Const" version="opset1">
+	<layer id="1138" name="__module.down_blocks.1.attentions.0.transformer_blocks.0.attn2/prim::ListConstruct_875/Reshape" type="Reshape" version="opset1">
+	<layer id="1286" name="__module.down_blocks.1.attentions.1.transformer_blocks.0.attn1/aten::size/ShapeOf_960" type="ShapeOf" version="opset3">
+	<layer id="1328" name="Constant_256051" type="Const" version="opset1">
+	<layer id="1329" name="__module.down_blocks.1.attentions.1.transformer_blocks.0.attn1/prim::ListConstruct_990/Reshape_1" type="Reshape" version="opset1">
+	<layer id="1355" name="__module.down_blocks.1.attentions.1.transformer_blocks.0.attn1/aten::size/ShapeOf_995" type="ShapeOf" version="opset3">
+	<layer id="1384" name="Constant_256064" type="Const" version="opset1">
+	<layer id="1385" name="__module.down_blocks.1.attentions.1.transformer_blocks.0.attn1/prim::ListConstruct_1059/Reshape" type="Reshape" version="opset1">
+	<layer id="1415" name="__module.down_blocks.1.attentions.1.transformer_blocks.0.attn2/aten::size/ShapeOf_1068" type="ShapeOf" version="opset3">
+	<layer id="1500" name="__module.down_blocks.1.attentions.1.transformer_blocks.0.attn2/aten::size/ShapeOf_1146" type="ShapeOf" version="opset3">
+	<layer id="1630" name="__module.down_blocks.2.attentions.0/aten::size/ShapeOf" type="ShapeOf" version="opset3">
+	<layer id="1692" name="__module.down_blocks.2.attentions.0.transformer_blocks.0.attn1/aten::size/ShapeOf_1268" type="ShapeOf" version="opset3">
+	<layer id="1705" name="Constant_256117" type="Const" version="opset1">
+	<layer id="1706" name="__module.down_blocks.2.attentions.0.transformer_blocks.0.attn1/prim::ListConstruct_1276/Reshape_2" type="Reshape" version="opset1">
+	<layer id="1745" name="Constant_256125" type="Const" version="opset1">
+	<layer id="1746" name="__module.down_blocks.2.attentions.0.transformer_blocks.0.attn1/prim::ListConstruct_1299/Reshape_2" type="Reshape" version="opset1">
+	<layer id="1756" name="__module.down_blocks.2.attentions.0.transformer_blocks.0.attn1/aten::size/ShapeOf_1334" type="ShapeOf" version="opset3">
+	<layer id="1792" name="__module.down_blocks.2.attentions.0.transformer_blocks.0.attn2/aten::size/ShapeOf_1358" type="ShapeOf" version="opset3">
+	<layer id="1805" name="Constant_256142" type="Const" version="opset1">
+	<layer id="1806" name="__module.down_blocks.2.attentions.0.transformer_blocks.0.attn2/prim::ListConstruct_1368/Reshape_1" type="Reshape" version="opset1">
+	<layer id="1821" name="__module.down_blocks.2.attentions.0.transformer_blocks.0.attn2/aten::size/ShapeOf_1370" type="ShapeOf" version="opset3">
+	<layer id="1999" name="__module.down_blocks.2.attentions.1/aten::size/ShapeOf" type="ShapeOf" version="opset3">
+	<layer id="2100" name="__module.down_blocks.2.attentions.1.transformer_blocks.0.attn1/aten::size/ShapeOf_1573" type="ShapeOf" version="opset3">
+	<layer id="2113" name="Constant_256197" type="Const" version="opset1">
+	<layer id="2114" name="__module.down_blocks.2.attentions.1.transformer_blocks.0.attn1/prim::ListConstruct_1591/Reshape_1" type="Reshape" version="opset1">
+	<layer id="2160" name="__module.down_blocks.2.attentions.1.transformer_blocks.0.attn2/aten::size/ShapeOf_1643" type="ShapeOf" version="opset3">
+	<layer id="2173" name="Constant_256208" type="Const" version="opset1">
+	<layer id="2174" name="__module.down_blocks.2.attentions.1.transformer_blocks.0.attn2/prim::ListConstruct_1649/Reshape_2" type="Reshape" version="opset1">
+	<layer id="2183" name="__module.down_blocks.2.attentions.1.transformer_blocks.0.attn2/aten::size/ShapeOf_1704" type="ShapeOf" version="opset3">
+	<layer id="2189" name="__module.down_blocks.2.attentions.1.transformer_blocks.0.attn2/aten::size/ShapeOf_1655" type="ShapeOf" version="opset3">
+	<layer id="2225" name="__module.down_blocks.2.attentions.1.transformer_blocks.0.attn2/aten::size/ShapeOf_1678" type="ShapeOf" version="opset3">
+	<layer id="2245" name="__module.down_blocks.2.attentions.1.transformer_blocks.0.attn2/aten::size/ShapeOf_1724" type="ShapeOf" version="opset3">
+	<layer id="2250" name="Constant_256229" type="Const" version="opset1">
+	<layer id="2251" name="__module.down_blocks.2.attentions.1.transformer_blocks.0.attn2/prim::ListConstruct_1738/Reshape" type="Reshape" version="opset1">
+	<layer id="2463" name="__module.mid_block.attentions.0/aten::size/ShapeOf_1876" type="ShapeOf" version="opset3">
+	<layer id="2495" name="__module.mid_block.attentions.0.transformer_blocks.0.attn1/aten::size/ShapeOf" type="ShapeOf" version="opset3">
+	<layer id="2537" name="Constant_256255" type="Const" version="opset1">
+	<layer id="2538" name="__module.mid_block.attentions.0.transformer_blocks.0.attn1/prim::ListConstruct_1922/Reshape_2" type="Reshape" version="opset1">
+	<layer id="2564" name="__module.mid_block.attentions.0.transformer_blocks.0.attn1/aten::size/ShapeOf_1931" type="ShapeOf" version="opset3">
|
36192 |
</output>
|
36193 |
</layer>
|
36194 |
+
<layer id="2577" name="Constant_256266" type="Const" version="opset1">
|
36195 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
36196 |
<output>
|
36197 |
<port id="0" precision="I64">
|
|
|
36199 |
</port>
|
36200 |
</output>
|
36201 |
</layer>
|
36202 |
+
<layer id="2578" name="__module.mid_block.attentions.0.transformer_blocks.0.attn1/prim::ListConstruct_1952/Reshape_1" type="Reshape" version="opset1">
|
36203 |
<data special_zero="false" />
|
36204 |
<input>
|
36205 |
<port id="0" precision="I32" />
|
|
|
36371 |
</port>
|
36372 |
</output>
|
36373 |
</layer>
|
36374 |
+
<layer id="2588" name="__module.mid_block.attentions.0.transformer_blocks.0.attn1/aten::size/ShapeOf_1977" type="ShapeOf" version="opset3">
|
36375 |
<data output_type="i32" />
|
36376 |
<input>
|
36377 |
<port id="0" precision="FP32">
|
|
|
36867 |
</port>
|
36868 |
</output>
|
36869 |
</layer>
|
36870 |
+
<layer id="2624" name="__module.mid_block.attentions.0.transformer_blocks.0.attn2/aten::size/ShapeOf_2007" type="ShapeOf" version="opset3">
|
36871 |
<data output_type="i32" />
|
36872 |
<input>
|
36873 |
<port id="0" precision="FP32">
|
|
|
36999 |
<port id="1" precision="I32" names="2021,2022" />
|
37000 |
</output>
|
37001 |
</layer>
|
37002 |
+
<layer id="2637" name="Constant_256280" type="Const" version="opset1">
|
37003 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
37004 |
<output>
|
37005 |
<port id="0" precision="I64">
|
|
|
37007 |
</port>
|
37008 |
</output>
|
37009 |
</layer>
|
37010 |
+
<layer id="2638" name="__module.mid_block.attentions.0.transformer_blocks.0.attn2/prim::ListConstruct_2014/Reshape_1" type="Reshape" version="opset1">
|
37011 |
<data special_zero="false" />
|
37012 |
<input>
|
37013 |
<port id="0" precision="I32" />
|
|
|
37740 |
</port>
|
37741 |
</output>
|
37742 |
</layer>
|
37743 |
+
<layer id="2689" name="__module.mid_block.attentions.0.transformer_blocks.0.attn2/aten::size/ShapeOf_2039" type="ShapeOf" version="opset3">
|
37744 |
<data output_type="i32" />
|
37745 |
<input>
|
37746 |
<port id="0" precision="FP32">
|
|
|
38009 |
</port>
|
38010 |
</output>
|
38011 |
</layer>
|
38012 |
+
<layer id="2709" name="__module.mid_block.attentions.0.transformer_blocks.0.attn2/aten::size/ShapeOf_2085" type="ShapeOf" version="opset3">
|
38013 |
<data output_type="i32" />
|
38014 |
<input>
|
38015 |
<port id="0" precision="FP32">
|
|
|
43354 |
</port>
|
43355 |
</output>
|
43356 |
</layer>
|
43357 |
+
<layer id="3042" name="__module.up_blocks.1.attentions.0/aten::size/ShapeOf_2336" type="ShapeOf" version="opset3">
|
43358 |
<data output_type="i32" />
|
43359 |
<input>
|
43360 |
<port id="0" precision="FP32">
|
|
|
44315 |
<port id="1" precision="I32" names="2220,2221" />
|
44316 |
</output>
|
44317 |
</layer>
|
44318 |
+
<layer id="3116" name="Constant_256324" type="Const" version="opset1">
|
44319 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
44320 |
<output>
|
44321 |
<port id="0" precision="I64">
|
|
|
44323 |
</port>
|
44324 |
</output>
|
44325 |
</layer>
|
44326 |
+
<layer id="3117" name="__module.up_blocks.1.attentions.0.transformer_blocks.0.attn1/prim::ListConstruct_2382/Reshape_2" type="Reshape" version="opset1">
|
44327 |
<data special_zero="false" />
|
44328 |
<input>
|
44329 |
<port id="0" precision="I32" />
|
|
|
44865 |
<port id="1" precision="I32" names="2234,2235" />
|
44866 |
</output>
|
44867 |
</layer>
|
44868 |
+
<layer id="3156" name="Constant_256335" type="Const" version="opset1">
|
44869 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
44870 |
<output>
|
44871 |
<port id="0" precision="I64">
|
|
|
44873 |
</port>
|
44874 |
</output>
|
44875 |
</layer>
|
44876 |
+
<layer id="3157" name="__module.up_blocks.1.attentions.0.transformer_blocks.0.attn1/prim::ListConstruct_2412/Reshape_1" type="Reshape" version="opset1">
|
44877 |
<data special_zero="false" />
|
44878 |
<input>
|
44879 |
<port id="0" precision="I32" />
|
|
|
45097 |
<port id="1" precision="I32" names="2258,2259" />
|
45098 |
</output>
|
45099 |
</layer>
|
45100 |
+
<layer id="3172" name="Constant_256336" type="Const" version="opset1">
|
45101 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
45102 |
<output>
|
45103 |
<port id="0" precision="I64">
|
|
|
45105 |
</port>
|
45106 |
</output>
|
45107 |
</layer>
|
45108 |
+
<layer id="3173" name="__module.up_blocks.1.attentions.0.transformer_blocks.0.attn1/prim::ListConstruct_2451/Reshape" type="Reshape" version="opset1">
|
45109 |
<data special_zero="false" />
|
45110 |
<input>
|
45111 |
<port id="0" precision="I32" />
|
|
|
45831 |
</port>
|
45832 |
</output>
|
45833 |
</layer>
|
45834 |
+
<layer id="3226" name="__module.up_blocks.1.attentions.0.transformer_blocks.0.attn2/aten::size/ShapeOf_2525" type="ShapeOf" version="opset3">
|
45835 |
<data output_type="i32" />
|
45836 |
<input>
|
45837 |
<port id="0" precision="FP32">
|
|
|
46414 |
</port>
|
46415 |
</output>
|
46416 |
</layer>
|
46417 |
+
<layer id="3268" name="__module.up_blocks.1.attentions.0.transformer_blocks.0.attn2/aten::size/ShapeOf_2502" type="ShapeOf" version="opset3">
|
46418 |
<data output_type="i32" />
|
46419 |
<input>
|
46420 |
<port id="0" precision="FP32">
|
|
|
49095 |
</port>
|
49096 |
</output>
|
49097 |
</layer>
|
49098 |
+
<layer id="3447" name="__module.up_blocks.1.attentions.1.transformer_blocks.0.attn1/aten::size/ShapeOf_2649" type="ShapeOf" version="opset3">
|
49099 |
<data output_type="i32" />
|
49100 |
<input>
|
49101 |
<port id="0" precision="FP32">
|
|
|
49385 |
</port>
|
49386 |
</output>
|
49387 |
</layer>
|
49388 |
+
<layer id="3470" name="__module.up_blocks.1.attentions.1.transformer_blocks.0.attn1/aten::size/ShapeOf_2707" type="ShapeOf" version="opset3">
|
49389 |
<data output_type="i32" />
|
49390 |
<input>
|
49391 |
<port id="0" precision="FP32">
|
|
|
50143 |
<port id="1" precision="I32" names="2425,2426" />
|
50144 |
</output>
|
50145 |
</layer>
|
50146 |
+
<layer id="3529" name="Constant_256401" type="Const" version="opset1">
|
50147 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
50148 |
<output>
|
50149 |
<port id="0" precision="I64">
|
|
|
50151 |
</port>
|
50152 |
</output>
|
50153 |
</layer>
|
50154 |
+
<layer id="3530" name="__module.up_blocks.1.attentions.1.transformer_blocks.0.attn1/prim::ListConstruct_2698/Reshape_2" type="Reshape" version="opset1">
|
50155 |
<data special_zero="false" />
|
50156 |
<input>
|
50157 |
<port id="0" precision="I32" />
|
|
|
51692 |
</port>
|
51693 |
</output>
|
51694 |
</layer>
|
51695 |
+
<layer id="3641" name="__module.up_blocks.1.attentions.1.transformer_blocks.0.attn2/aten::size/ShapeOf_2792" type="ShapeOf" version="opset3">
|
51696 |
<data output_type="i32" />
|
51697 |
<input>
|
51698 |
<port id="0" precision="FP32">
|
|
|
51961 |
</port>
|
51962 |
</output>
|
51963 |
</layer>
|
51964 |
+
<layer id="3661" name="__module.up_blocks.1.attentions.1.transformer_blocks.0.attn2/aten::size/ShapeOf_2841" type="ShapeOf" version="opset3">
|
51965 |
<data output_type="i32" />
|
51966 |
<input>
|
51967 |
<port id="0" precision="FP32">
|
|
|
53910 |
</port>
|
53911 |
</output>
|
53912 |
</layer>
|
53913 |
+
<layer id="3788" name="__module.up_blocks.1.attentions.2/aten::size/ShapeOf_2925" type="ShapeOf" version="opset3">
|
53914 |
<data output_type="i32" />
|
53915 |
<input>
|
53916 |
<port id="0" precision="FP32">
|
|
|
54739 |
</port>
|
54740 |
</output>
|
54741 |
</layer>
|
54742 |
+
<layer id="3849" name="__module.up_blocks.1.attentions.2.transformer_blocks.0.attn1/aten::size/ShapeOf_2960" type="ShapeOf" version="opset3">
|
54743 |
<data output_type="i32" />
|
54744 |
<input>
|
54745 |
<port id="0" precision="FP32">
|
|
|
54871 |
<port id="1" precision="I32" names="2602,2603" />
|
54872 |
</output>
|
54873 |
</layer>
|
54874 |
+
<layer id="3862" name="Constant_256462" type="Const" version="opset1">
|
54875 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
54876 |
<output>
|
54877 |
<port id="0" precision="I64">
|
|
|
54879 |
</port>
|
54880 |
</output>
|
54881 |
</layer>
|
54882 |
+
<layer id="3863" name="__module.up_blocks.1.attentions.2.transformer_blocks.0.attn1/prim::ListConstruct_2968/Reshape_2" type="Reshape" version="opset1">
|
54883 |
<data special_zero="false" />
|
54884 |
<input>
|
54885 |
<port id="0" precision="I32" />
|
|
|
55289 |
</port>
|
55290 |
</output>
|
55291 |
</layer>
|
55292 |
+
<layer id="3889" name="__module.up_blocks.1.attentions.2.transformer_blocks.0.attn1/aten::size/ShapeOf_2977" type="ShapeOf" version="opset3">
|
55293 |
<data output_type="i32" />
|
55294 |
<input>
|
55295 |
<port id="0" precision="FP32">
|
|
|
55421 |
<port id="1" precision="I32" names="2616,2617" />
|
55422 |
</output>
|
55423 |
</layer>
|
55424 |
+
<layer id="3902" name="Constant_256473" type="Const" version="opset1">
|
55425 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
55426 |
<output>
|
55427 |
<port id="0" precision="I64">
|
|
|
55429 |
</port>
|
55430 |
</output>
|
55431 |
</layer>
|
55432 |
+
<layer id="3903" name="__module.up_blocks.1.attentions.2.transformer_blocks.0.attn1/prim::ListConstruct_2998/Reshape_1" type="Reshape" version="opset1">
|
55433 |
<data special_zero="false" />
|
55434 |
<input>
|
55435 |
<port id="0" precision="I32" />
|
|
|
56097 |
</port>
|
56098 |
</output>
|
56099 |
</layer>
|
56100 |
+
<layer id="3949" name="__module.up_blocks.1.attentions.2.transformer_blocks.0.attn2/aten::size/ShapeOf_3050" type="ShapeOf" version="opset3">
|
56101 |
<data output_type="i32" />
|
56102 |
<input>
|
56103 |
<port id="0" precision="FP32">
|
|
|
56229 |
<port id="1" precision="I32" names="2661,2662" />
|
56230 |
</output>
|
56231 |
</layer>
|
56232 |
+
<layer id="3962" name="Constant_256484" type="Const" version="opset1">
|
56233 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
56234 |
<output>
|
56235 |
<port id="0" precision="I64">
|
|
|
56237 |
</port>
|
56238 |
</output>
|
56239 |
</layer>
|
56240 |
+
<layer id="3963" name="__module.up_blocks.1.attentions.2.transformer_blocks.0.attn2/prim::ListConstruct_3056/Reshape_2" type="Reshape" version="opset1">
|
56241 |
<data special_zero="false" />
|
56242 |
<input>
|
56243 |
<port id="0" precision="I32" />
|
|
|
56463 |
</port>
|
56464 |
</output>
|
56465 |
</layer>
|
56466 |
+
<layer id="3978" name="__module.up_blocks.1.attentions.2.transformer_blocks.0.attn2/aten::size/ShapeOf_3062" type="ShapeOf" version="opset3">
|
56467 |
<data output_type="i32" />
|
56468 |
<input>
|
56469 |
<port id="0" precision="FP32">
|
|
|
57239 |
</port>
|
57240 |
</output>
|
57241 |
</layer>
|
57242 |
+
<layer id="4034" name="__module.up_blocks.1.attentions.2.transformer_blocks.0.attn2/aten::size/ShapeOf_3137" type="ShapeOf" version="opset3">
|
57243 |
<data output_type="i32" />
|
57244 |
<input>
|
57245 |
<port id="0" precision="FP32">
|
|
|
57291 |
<port id="1" precision="I32" names="2713,2714" />
|
57292 |
</output>
|
57293 |
</layer>
|
57294 |
+
<layer id="4039" name="Constant_256509" type="Const" version="opset1">
|
57295 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
57296 |
<output>
|
57297 |
<port id="0" precision="I64">
|
|
|
57299 |
</port>
|
57300 |
</output>
|
57301 |
</layer>
|
57302 |
+
<layer id="4040" name="__module.up_blocks.1.attentions.2.transformer_blocks.0.attn2/prim::ListConstruct_3152/Reshape" type="Reshape" version="opset1">
|
57303 |
<data special_zero="false" />
|
57304 |
<input>
|
57305 |
<port id="0" precision="I32" />
|
|
|
59301 |
</port>
|
59302 |
</output>
|
59303 |
</layer>
|
59304 |
+
<layer id="4168" name="__module.up_blocks.2.attentions.0/aten::size/ShapeOf" type="ShapeOf" version="opset3">
|
59305 |
<data output_type="i32" />
|
59306 |
<input>
|
59307 |
<port id="0" precision="FP32">
|
|
|
59764 |
</port>
|
59765 |
</output>
|
59766 |
</layer>
|
59767 |
+
<layer id="4200" name="__module.up_blocks.2.attentions.0.transformer_blocks.0.attn1/aten::size/ShapeOf_3240" type="ShapeOf" version="opset3">
|
59768 |
<data output_type="i32" />
|
59769 |
<input>
|
59770 |
<port id="0" precision="FP32">
|
|
|
60054 |
</port>
|
60055 |
</output>
|
60056 |
</layer>
|
60057 |
+
<layer id="4223" name="__module.up_blocks.2.attentions.0.transformer_blocks.0.attn1/aten::size/ShapeOf_3295" type="ShapeOf" version="opset3">
|
60058 |
<data output_type="i32" />
|
60059 |
<input>
|
60060 |
<port id="0" precision="FP32">
|
|
|
60130 |
</port>
|
60131 |
</output>
|
60132 |
</layer>
|
60133 |
+
<layer id="4229" name="__module.up_blocks.2.attentions.0.transformer_blocks.0.attn1/aten::size/ShapeOf_3249" type="ShapeOf" version="opset3">
|
60134 |
<data output_type="i32" />
|
60135 |
<input>
|
60136 |
<port id="0" precision="FP32">
|
|
|
60680 |
</port>
|
60681 |
</output>
|
60682 |
</layer>
|
60683 |
+
<layer id="4269" name="__module.up_blocks.2.attentions.0.transformer_blocks.0.attn1/aten::size/ShapeOf_3275" type="ShapeOf" version="opset3">
|
60684 |
<data output_type="i32" />
|
60685 |
<input>
|
60686 |
<port id="0" precision="FP32">
|
|
|
60812 |
<port id="1" precision="I32" names="2809,2810" />
|
60813 |
</output>
|
60814 |
</layer>
|
60815 |
+
<layer id="4282" name="Constant_256542" type="Const" version="opset1">
|
60816 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
60817 |
<output>
|
60818 |
<port id="0" precision="I64">
|
|
|
60820 |
</port>
|
60821 |
</output>
|
60822 |
</layer>
|
60823 |
+
<layer id="4283" name="__module.up_blocks.2.attentions.0.transformer_blocks.0.attn1/prim::ListConstruct_3293/Reshape_1" type="Reshape" version="opset1">
|
60824 |
<data special_zero="false" />
|
60825 |
<input>
|
60826 |
<port id="0" precision="I32" />
|
|
|
60992 |
</port>
|
60993 |
</output>
|
60994 |
</layer>
|
60995 |
+
<layer id="4293" name="__module.up_blocks.2.attentions.0.transformer_blocks.0.attn1/aten::size/ShapeOf_3318" type="ShapeOf" version="opset3">
|
60996 |
<data output_type="i32" />
|
60997 |
<input>
|
60998 |
<port id="0" precision="FP32">
|
|
|
61488 |
</port>
|
61489 |
</output>
|
61490 |
</layer>
|
61491 |
+
<layer id="4329" name="__module.up_blocks.2.attentions.0.transformer_blocks.0.attn2/aten::size/ShapeOf_3345" type="ShapeOf" version="opset3">
|
61492 |
<data output_type="i32" />
|
61493 |
<input>
|
61494 |
<port id="0" precision="FP32">
|
|
|
61854 |
</port>
|
61855 |
</output>
|
61856 |
</layer>
|
61857 |
+
<layer id="4358" name="__module.up_blocks.2.attentions.0.transformer_blocks.0.attn2/aten::size/ShapeOf_3360" type="ShapeOf" version="opset3">
|
61858 |
<data output_type="i32" />
|
61859 |
<input>
|
61860 |
<port id="0" precision="FP32">
|
|
|
64579 |
</port>
|
64580 |
</output>
|
64581 |
</layer>
|
64582 |
+
<layer id="4541" name="__module.up_blocks.2.attentions.1/aten::size/ShapeOf_3510" type="ShapeOf" version="opset3">
|
64583 |
<data output_type="i32" />
|
64584 |
<input>
|
64585 |
<port id="0" precision="FP32">
|
|
|
65174 |
<port id="1" precision="I32" names="2972,2973" />
|
65175 |
</output>
|
65176 |
</layer>
|
65177 |
+
<layer id="4586" name="Constant_256594" type="Const" version="opset1">
|
65178 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
65179 |
<output>
|
65180 |
<port id="0" precision="I64">
|
|
|
65182 |
</port>
|
65183 |
</output>
|
65184 |
</layer>
|
65185 |
+
<layer id="4587" name="__module.up_blocks.2.attentions.1.transformer_blocks.0.attn1/prim::ListConstruct_3540/Reshape_1" type="Reshape" version="opset1">
|
65186 |
<data special_zero="false" />
|
65187 |
<input>
|
65188 |
<port id="0" precision="I32" />
|
|
|
65332 |
</port>
|
65333 |
</output>
|
65334 |
</layer>
|
65335 |
+
<layer id="4596" name="__module.up_blocks.2.attentions.1.transformer_blocks.0.attn1/aten::size/ShapeOf_3588" type="ShapeOf" version="opset3">
|
65336 |
<data output_type="i32" />
|
65337 |
<input>
|
65338 |
<port id="0" precision="FP32">
|
|
|
65408 |
</port>
|
65409 |
</output>
|
65410 |
</layer>
|
65411 |
+
<layer id="4602" name="__module.up_blocks.2.attentions.1.transformer_blocks.0.attn1/aten::size/ShapeOf_3548" type="ShapeOf" version="opset3">
|
65412 |
<data output_type="i32" />
|
65413 |
<input>
|
65414 |
<port id="0" precision="FP32">
|
|
|
65958 |
</port>
|
65959 |
</output>
|
65960 |
</layer>
|
65961 |
+
<layer id="4642" name="__module.up_blocks.2.attentions.1.transformer_blocks.0.attn1/aten::size/ShapeOf_3565" type="ShapeOf" version="opset3">
|
65962 |
<data output_type="i32" />
|
65963 |
<input>
|
65964 |
<port id="0" precision="FP32">
|
|
|
66090 |
<port id="1" precision="I32" names="3000,3001" />
|
66091 |
</output>
|
66092 |
</layer>
|
66093 |
+
<layer id="4655" name="Constant_256608" type="Const" version="opset1">
|
66094 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
66095 |
<output>
|
66096 |
<port id="0" precision="I64">
|
|
|
66098 |
</port>
|
66099 |
</output>
|
66100 |
</layer>
|
66101 |
+
<layer id="4656" name="__module.up_blocks.2.attentions.1.transformer_blocks.0.attn1/prim::ListConstruct_3579/Reshape_2" type="Reshape" version="opset1">
|
66102 |
<data special_zero="false" />
|
66103 |
<input>
|
66104 |
<port id="0" precision="I32" />
|
|
|
66270 |
</port>
|
66271 |
</output>
|
66272 |
</layer>
|
66273 |
+
<layer id="4666" name="__module.up_blocks.2.attentions.1.transformer_blocks.0.attn1/aten::size/ShapeOf_3611" type="ShapeOf" version="opset3">
|
66274 |
<data output_type="i32" />
|
66275 |
<input>
|
66276 |
<port id="0" precision="FP32">
|
|
|
66766 |
</port>
|
66767 |
</output>
|
66768 |
</layer>
|
66769 |
+
<layer id="4702" name="__module.up_blocks.2.attentions.1.transformer_blocks.0.attn2/aten::size/ShapeOf" type="ShapeOf" version="opset3">
|
66770 |
<data output_type="i32" />
|
66771 |
<input>
|
66772 |
<port id="0" precision="FP32">
|
|
|
67056 |
</port>
|
67057 |
</output>
|
67058 |
</layer>
|
67059 |
+
<layer id="4725" name="__module.up_blocks.2.attentions.1.transformer_blocks.0.attn2/aten::size/ShapeOf_3696" type="ShapeOf" version="opset3">
|
67060 |
<data output_type="i32" />
|
67061 |
<input>
|
67062 |
<port id="0" precision="FP32">
|
|
|
67132 |
</port>
|
67133 |
</output>
|
67134 |
</layer>
|
67135 |
+
<layer id="4731" name="__module.up_blocks.2.attentions.1.transformer_blocks.0.attn2/aten::size/ShapeOf_3650" type="ShapeOf" version="opset3">
|
67136 |
<data output_type="i32" />
|
67137 |
<input>
|
67138 |
<port id="0" precision="FP32">
|
|
|
67639 |
</port>
|
67640 |
</output>
|
67641 |
</layer>
|
67642 |
+
<layer id="4767" name="__module.up_blocks.2.attentions.1.transformer_blocks.0.attn2/aten::size/ShapeOf_3673" type="ShapeOf" version="opset3">
|
67643 |
<data output_type="i32" />
|
67644 |
<input>
|
67645 |
<port id="0" precision="FP32">
|
|
|
67908 |
</port>
|
67909 |
</output>
|
67910 |
</layer>
|
67911 |
+
<layer id="4787" name="__module.up_blocks.2.attentions.1.transformer_blocks.0.attn2/aten::size/ShapeOf_3722" type="ShapeOf" version="opset3">
|
67912 |
<data output_type="i32" />
|
67913 |
<input>
|
67914 |
<port id="0" precision="FP32">
|
|
|
69857 |
</port>
|
69858 |
</output>
|
69859 |
</layer>
|
69860 |
+
<layer id="4914" name="__module.up_blocks.2.attentions.2/aten::size/ShapeOf_3806" type="ShapeOf" version="opset3">
|
69861 |
<data output_type="i32" />
|
69862 |
<input>
|
69863 |
<port id="0" precision="FP32">
|
|
|
70320 |
</port>
|
70321 |
</output>
|
70322 |
</layer>
|
70323 |
+
<layer id="4946" name="__module.up_blocks.2.attentions.2.transformer_blocks.0.attn1/aten::size/ShapeOf_3823" type="ShapeOf" version="opset3">
|
70324 |
<data output_type="i32" />
|
70325 |
<input>
|
70326 |
<port id="0" precision="FP32">
|
|
|
71236 |
</port>
|
71237 |
</output>
|
71238 |
</layer>
|
71239 |
+
<layer id="5015" name="__module.up_blocks.2.attentions.2.transformer_blocks.0.attn1/aten::size/ShapeOf_3864" type="ShapeOf" version="opset3">
|
71240 |
<data output_type="i32" />
|
71241 |
<input>
|
71242 |
<port id="0" precision="FP32">
|
|
|
71600 |
<port id="1" precision="I32" names="3215,3216" />
|
71601 |
</output>
|
71602 |
</layer>
|
71603 |
+
<layer id="5044" name="Constant_256685" type="Const" version="opset1">
|
71604 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
71605 |
<output>
|
71606 |
<port id="0" precision="I64">
|
|
|
71608 |
</port>
|
71609 |
</output>
|
71610 |
</layer>
|
71611 |
+
<layer id="5045" name="__module.up_blocks.2.attentions.2.transformer_blocks.0.attn1/prim::ListConstruct_3925/Reshape" type="Reshape" version="opset1">
|
71612 |
<data special_zero="false" />
|
71613 |
<input>
|
71614 |
<port id="0" precision="I32" />
|
|
|
72044 |
</port>
|
72045 |
</output>
|
72046 |
</layer>
|
72047 |
+
<layer id="5075" name="__module.up_blocks.2.attentions.2.transformer_blocks.0.attn2/aten::size/ShapeOf_3931" type="ShapeOf" version="opset3">
|
72048 |
<data output_type="i32" />
|
72049 |
<input>
|
72050 |
<port id="0" precision="FP32">
|
|
|
72176 |
<port id="1" precision="I32" names="3236,3237" />
|
72177 |
</output>
|
72178 |
</layer>
|
72179 |
+
<layer id="5088" name="Constant_256691" type="Const" version="opset1">
|
72180 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
72181 |
<output>
|
72182 |
<port id="0" precision="I64">
|
|
|
72184 |
</port>
|
72185 |
</output>
|
72186 |
</layer>
|
72187 |
+
<layer id="5089" name="__module.up_blocks.2.attentions.2.transformer_blocks.0.attn2/prim::ListConstruct_3937/Reshape_2" type="Reshape" version="opset1">
|
72188 |
<data special_zero="false" />
|
72189 |
<input>
|
72190 |
<port id="0" precision="I32" />
|
|
|
73186 |
</port>
|
73187 |
</output>
|
73188 |
</layer>
|
73189 |
+
<layer id="5160" name="__module.up_blocks.2.attentions.2.transformer_blocks.0.attn2/aten::size/ShapeOf_4012" type="ShapeOf" version="opset3">
|
73190 |
<data output_type="i32" />
|
73191 |
<input>
|
73192 |
<port id="0" precision="FP32">
|
|
|
75248 |
</port>
|
75249 |
</output>
|
75250 |
</layer>
|
75251 |
+
<layer id="5294" name="__module.up_blocks.3.attentions.0/aten::size/ShapeOf_4098" type="ShapeOf" version="opset3">
|
75252 |
<data output_type="i32" />
|
75253 |
<input>
|
75254 |
<port id="0" precision="FP32">
|
|
|
76001 |
</port>
|
76002 |
</output>
|
76003 |
</layer>
|
76004 |
+
<layer id="5349" name="__module.up_blocks.3.attentions.0.transformer_blocks.0.attn1/aten::size/ShapeOf_4179" type="ShapeOf" version="opset3">
|
76005 |
<data output_type="i32" />
|
76006 |
<input>
|
76007 |
<port id="0" precision="FP32">
|
|
|
76627 |
</port>
|
76628 |
</output>
|
76629 |
</layer>
|
76630 |
+
<layer id="5395" name="__module.up_blocks.3.attentions.0.transformer_blocks.0.attn1/aten::size/ShapeOf_4156" type="ShapeOf" version="opset3">
|
76631 |
<data output_type="i32" />
|
76632 |
<input>
|
76633 |
<port id="0" precision="FP32">
|
|
|
76759 |
<port id="1" precision="I32" names="3384,3385" />
|
76760 |
</output>
|
76761 |
</layer>
|
76762 |
+
<layer id="5408" name="Constant_256749" type="Const" version="opset1">
|
76763 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
76764 |
<output>
|
76765 |
<port id="0" precision="I64">
|
|
|
76767 |
</port>
|
76768 |
</output>
|
76769 |
</layer>
|
76770 |
+
<layer id="5409" name="__module.up_blocks.3.attentions.0.transformer_blocks.0.attn1/prim::ListConstruct_4174/Reshape_1" type="Reshape" version="opset1">
|
76771 |
<data special_zero="false" />
|
76772 |
<input>
|
76773 |
<port id="0" precision="I32" />
|
|
|
76991 |
<port id="1" precision="I32" names="3408,3409" />
|
76992 |
</output>
|
76993 |
</layer>
|
76994 |
+
<layer id="5424" name="Constant_256750" type="Const" version="opset1">
|
76995 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
76996 |
<output>
|
76997 |
<port id="0" precision="I64">
|
|
|
76999 |
</port>
|
77000 |
</output>
|
77001 |
</layer>
|
77002 |
+
<layer id="5425" name="__module.up_blocks.3.attentions.0.transformer_blocks.0.attn1/prim::ListConstruct_4213/Reshape" type="Reshape" version="opset1">
|
77003 |
<data special_zero="false" />
|
77004 |
<input>
|
77005 |
<port id="0" precision="I32" />
|
|
|
77567 |
<port id="1" precision="I32" names="3429,3430" />
|
77568 |
</output>
|
77569 |
</layer>
|
77570 |
+
<layer id="5468" name="Constant_256763" type="Const" version="opset1">
|
77571 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
77572 |
<output>
|
77573 |
<port id="0" precision="I64">
|
|
|
77575 |
</port>
|
77576 |
</output>
|
77577 |
</layer>
|
77578 |
+
<layer id="5469" name="__module.up_blocks.3.attentions.0.transformer_blocks.0.attn2/prim::ListConstruct_4236/Reshape_1" type="Reshape" version="opset1">
|
77579 |
<data special_zero="false" />
|
77580 |
<input>
|
77581 |
<port id="0" precision="I32" />
|
|
|
77801 |
</port>
|
77802 |
</output>
|
77803 |
</layer>
|
77804 |
+
<layer id="5484" name="__module.up_blocks.3.attentions.0.transformer_blocks.0.attn2/aten::size/ShapeOf_4241" type="ShapeOf" version="opset3">
|
77805 |
<data output_type="i32" />
|
77806 |
<input>
|
77807 |
<port id="0" precision="FP32">
|
|
|
78308 |
</port>
|
78309 |
</output>
|
78310 |
</layer>
|
78311 |
+
<layer id="5520" name="__module.up_blocks.3.attentions.0.transformer_blocks.0.attn2/aten::size/ShapeOf_4261" type="ShapeOf" version="opset3">
|
78312 |
<data output_type="i32" />
|
78313 |
<input>
|
78314 |
<port id="0" precision="FP32">
|
|
|
78629 |
<port id="1" precision="I32" names="3481,3482" />
|
78630 |
</output>
|
78631 |
</layer>
|
78632 |
+
<layer id="5545" name="Constant_256785" type="Const" version="opset1">
|
78633 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
78634 |
<output>
|
78635 |
<port id="0" precision="I64">
|
|
|
78637 |
</port>
|
78638 |
</output>
|
78639 |
</layer>
|
78640 |
+
<layer id="5546" name="__module.up_blocks.3.attentions.0.transformer_blocks.0.attn2/prim::ListConstruct_4328/Reshape" type="Reshape" version="opset1">
|
78641 |
<data special_zero="false" />
|
78642 |
<input>
|
78643 |
<port id="0" precision="I32" />
|
|
|
80526 |
</port>
|
80527 |
</output>
|
80528 |
</layer>
|
80529 |
+
<layer id="5667" name="__module.up_blocks.3.attentions.1/aten::size/ShapeOf_4391" type="ShapeOf" version="opset3">
|
80530 |
<data output_type="i32" />
|
80531 |
<input>
|
80532 |
<port id="0" precision="FP32">
|
|
|
81121 |
<port id="1" precision="I32" names="3547,3548" />
|
81122 |
</output>
|
81123 |
</layer>
|
81124 |
+
<layer id="5712" name="Constant_256798" type="Const" version="opset1">
|
81125 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
81126 |
<output>
|
81127 |
<port id="0" precision="I64">
|
|
|
81129 |
</port>
|
81130 |
</output>
|
81131 |
</layer>
|
81132 |
+
<layer id="5713" name="__module.up_blocks.3.attentions.1.transformer_blocks.0.attn1/prim::ListConstruct_4417/Reshape_2" type="Reshape" version="opset1">
|
81133 |
<data special_zero="false" />
|
81134 |
<input>
|
81135 |
<port id="0" precision="I32" />
|
|
|
81279 |
</port>
|
81280 |
</output>
|
81281 |
</layer>
|
81282 |
+
<layer id="5722" name="__module.up_blocks.3.attentions.1.transformer_blocks.0.attn1/aten::size/ShapeOf_4469" type="ShapeOf" version="opset3">
|
81283 |
<data output_type="i32" />
|
81284 |
<input>
|
81285 |
<port id="0" precision="FP32">
|
|
|
81355 |
</port>
|
81356 |
</output>
|
81357 |
</layer>
|
81358 |
+
<layer id="5728" name="__module.up_blocks.3.attentions.1.transformer_blocks.0.attn1/aten::size/ShapeOf_4423" type="ShapeOf" version="opset3">
|
81359 |
<data output_type="i32" />
|
81360 |
<input>
|
81361 |
<port id="0" precision="FP32">
|
|
|
81487 |
<port id="1" precision="I32" names="3561,3562" />
|
81488 |
</output>
|
81489 |
</layer>
|
81490 |
+
<layer id="5741" name="Constant_256810" type="Const" version="opset1">
|
81491 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
81492 |
<output>
|
81493 |
<port id="0" precision="I64">
|
|
|
81495 |
</port>
|
81496 |
</output>
|
81497 |
</layer>
|
81498 |
+
<layer id="5742" name="__module.up_blocks.3.attentions.1.transformer_blocks.0.attn1/prim::ListConstruct_4444/Reshape_1" type="Reshape" version="opset1">
|
81499 |
<data special_zero="false" />
|
81500 |
<input>
|
81501 |
<port id="0" precision="I32" />
|
|
|
82217 |
</port>
|
82218 |
</output>
|
82219 |
</layer>
|
82220 |
+
<layer id="5792" name="__module.up_blocks.3.attentions.1.transformer_blocks.0.attn1/aten::size/ShapeOf_4495" type="ShapeOf" version="opset3">
|
82221 |
<data output_type="i32" />
|
82222 |
<input>
|
82223 |
<port id="0" precision="FP32">
|
|
|
83079 |
</port>
|
83080 |
</output>
|
83081 |
</layer>
|
83082 |
+
<layer id="5857" name="__module.up_blocks.3.attentions.1.transformer_blocks.0.attn2/aten::size/ShapeOf_4531" type="ShapeOf" version="opset3">
|
83083 |
<data output_type="i32" />
|
83084 |
<input>
|
83085 |
<port id="0" precision="FP32">
|
|
|
83586 |
</port>
|
83587 |
</output>
|
83588 |
</layer>
|
83589 |
+
<layer id="5893" name="__module.up_blocks.3.attentions.1.transformer_blocks.0.attn2/aten::size/ShapeOf_4554" type="ShapeOf" version="opset3">
|
83590 |
<data output_type="i32" />
|
83591 |
<input>
|
83592 |
<port id="0" precision="FP32">
|
|
|
83855 |
</port>
|
83856 |
</output>
|
83857 |
</layer>
|
83858 |
+
<layer id="5913" name="__module.up_blocks.3.attentions.1.transformer_blocks.0.attn2/aten::size/ShapeOf_4600" type="ShapeOf" version="opset3">
|
83859 |
<data output_type="i32" />
|
83860 |
<input>
|
83861 |
<port id="0" precision="FP32">
|
|
|
86399 |
<port id="1" precision="I32" names="3738,3739" />
|
86400 |
</output>
|
86401 |
</layer>
|
86402 |
+
<layer id="6085" name="Constant_256870" type="Const" version="opset1">
|
86403 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
86404 |
<output>
|
86405 |
<port id="0" precision="I64">
|
|
|
86407 |
</port>
|
86408 |
</output>
|
86409 |
</layer>
|
86410 |
+
<layer id="6086" name="__module.up_blocks.3.attentions.2.transformer_blocks.0.attn1/prim::ListConstruct_4714/Reshape_1" type="Reshape" version="opset1">
|
86411 |
<data special_zero="false" />
|
86412 |
<input>
|
86413 |
<port id="0" precision="I32" />
|
|
|
86633 |
</port>
|
86634 |
</output>
|
86635 |
</layer>
|
86636 |
+
<layer id="6101" name="__module.up_blocks.3.attentions.2.transformer_blocks.0.attn1/aten::size/ShapeOf_4722" type="ShapeOf" version="opset3">
|
86637 |
<data output_type="i32" />
|
86638 |
<input>
|
86639 |
<port id="0" precision="FP32">
|
|
|
87183 |
</port>
|
87184 |
</output>
|
87185 |
</layer>
|
87186 |
+
<layer id="6141" name="__module.up_blocks.3.attentions.2.transformer_blocks.0.attn1/aten::size/ShapeOf_4745" type="ShapeOf" version="opset3">
|
87187 |
<data output_type="i32" />
|
87188 |
<input>
|
87189 |
<port id="0" precision="FP32">
|
|
|
87315 |
<port id="1" precision="I32" names="3766,3767" />
|
87316 |
</output>
|
87317 |
</layer>
|
87318 |
+
<layer id="6154" name="Constant_256887" type="Const" version="opset1">
|
87319 |
<data element_type="i64" shape="1" offset="17273056" size="8" />
|
87320 |
<output>
|
87321 |
<port id="0" precision="I64">
|
|
|
87323 |
</port>
|
87324 |
</output>
|
87325 |
</layer>
|
87326 |
+
<layer id="6155" name="__module.up_blocks.3.attentions.2.transformer_blocks.0.attn1/prim::ListConstruct_4760/Reshape_1" type="Reshape" version="opset1">
|
87327 |
<data special_zero="false" />
|
87328 |
<input>
|
87329 |
<port id="0" precision="I32" />
|
|
|
87495 |
</port>
|
87496 |
</output>
|
87497 |
</layer>
|
87498 |
+
<layer id="6165" name="__module.up_blocks.3.attentions.2.transformer_blocks.0.attn1/aten::size/ShapeOf_4785" type="ShapeOf" version="opset3">
|
87499 |
<data output_type="i32" />
|
87500 |
<input>
|
87501 |
<port id="0" precision="FP32">
|
|
|
89133 |
</port>
|
89134 |
</output>
|
89135 |
</layer>
|
89136 |
+
<layer id="6286" name="__module.up_blocks.3.attentions.2.transformer_blocks.0.attn2/aten::size/ShapeOf_4896" type="ShapeOf" version="opset3">
|
89137 |
<data output_type="i32" />
|
89138 |
<input>
|
89139 |
<port id="0" precision="FP32">
|
vae_decoder/openvino_model.xml
CHANGED
@@ -699,7 +699,7 @@
 			</port>
 		</output>
 	</layer>
-	<layer id="41" name="__module.decoder.mid_block.attentions.0/aten::size/
+	<layer id="41" name="__module.decoder.mid_block.attentions.0/aten::size/ShapeOf_35" type="ShapeOf" version="opset3">
 		<data output_type="i32" />
 		<input>
 			<port id="0" precision="FP32">
@@ -1174,7 +1174,7 @@
 			</port>
 		</output>
 	</layer>
-	<layer id="77" name="__module.decoder.mid_block.attentions.0/aten::size/
+	<layer id="77" name="__module.decoder.mid_block.attentions.0/aten::size/ShapeOf_106" type="ShapeOf" version="opset3">
 		<data output_type="i32" />
 		<input>
 			<port id="0" precision="FP32">
@@ -1437,7 +1437,7 @@
 			</port>
 		</output>
 	</layer>
-	<layer id="98" name="__module.decoder.mid_block.attentions.0/aten::size/
+	<layer id="98" name="__module.decoder.mid_block.attentions.0/aten::size/ShapeOf_165" type="ShapeOf" version="opset3">
 		<data output_type="i32" />
 		<input>
 			<port id="0" precision="FP32">
@@ -2216,7 +2216,7 @@
 			<port id="1" precision="I32" names="224,225" />
 		</output>
 	</layer>
-	<layer id="157" name="
+	<layer id="157" name="Constant_497848" type="Const" version="opset1">
 		<data element_type="i64" shape="1" offset="18962540" size="8" />
 		<output>
 			<port id="0" precision="I64">
@@ -2224,7 +2224,7 @@
 			</port>
 		</output>
 	</layer>
-	<layer id="158" name="__module.decoder.mid_block.attentions.0/prim::
+	<layer id="158" name="__module.decoder.mid_block.attentions.0/prim::ListConstruct_163/Reshape_1" type="Reshape" version="opset1">
 		<data special_zero="false" />
 		<input>
 			<port id="0" precision="I32" />
@@ -2373,7 +2373,7 @@
 			</port>
 		</output>
 	</layer>
-	<layer id="166" name="__module.decoder.mid_block.attentions.0/aten::size/
+	<layer id="166" name="__module.decoder.mid_block.attentions.0/aten::size/ShapeOf_203" type="ShapeOf" version="opset3">
 		<data output_type="i32" />
 		<input>
 			<port id="0" precision="FP32">
vae_encoder/openvino_model.xml
CHANGED
@@ -5283,7 +5283,7 @@
 			</port>
 		</output>
 	</layer>
-	<layer id="305" name="__module.encoder.mid_block.attentions.0/aten::size/
+	<layer id="305" name="__module.encoder.mid_block.attentions.0/aten::size/ShapeOf" type="ShapeOf" version="opset3">
 		<data output_type="i32" />
 		<input>
 			<port id="0" precision="FP32">
@@ -6021,7 +6021,7 @@
 			</port>
 		</output>
 	</layer>
-	<layer id="362" name="__module.encoder.mid_block.attentions.0/aten::size/
+	<layer id="362" name="__module.encoder.mid_block.attentions.0/aten::size/ShapeOf_378" type="ShapeOf" version="opset3">
 		<data output_type="i32" />
 		<input>
 			<port id="0" precision="FP32">
@@ -6129,7 +6129,7 @@
 			</port>
 		</output>
 	</layer>
-	<layer id="370" name="__module.encoder.mid_block.attentions.0/aten::size/
+	<layer id="370" name="__module.encoder.mid_block.attentions.0/aten::size/ShapeOf_335" type="ShapeOf" version="opset3">
 		<data output_type="i32" />
 		<input>
 			<port id="0" precision="FP32">
@@ -6163,7 +6163,7 @@
 			<port id="3" precision="I32" names="250,260,261" />
 		</output>
 	</layer>
-	<layer id="373" name="
+	<layer id="373" name="Constant_477291" type="Const" version="opset1">
 		<data element_type="i64" shape="1" offset="113409564" size="8" />
 		<output>
 			<port id="0" precision="I64">
@@ -6171,7 +6171,7 @@
 			</port>
 		</output>
 	</layer>
-	<layer id="374" name="__module.encoder.mid_block.attentions.0/prim::
+	<layer id="374" name="__module.encoder.mid_block.attentions.0/prim::ListConstruct_350/Reshape" type="Reshape" version="opset1">
 		<data special_zero="false" />
 		<input>
 			<port id="0" precision="I32" />
@@ -6251,7 +6251,7 @@
 			<port id="1" precision="I32" names="255,256" />
 		</output>
 	</layer>
-	<layer id="382" name="
+	<layer id="382" name="Constant_477293" type="Const" version="opset1">
 		<data element_type="i64" shape="1" offset="113409564" size="8" />
 		<output>
 			<port id="0" precision="I64">
@@ -6259,7 +6259,7 @@
 			</port>
 		</output>
 	</layer>
-	<layer id="383" name="__module.encoder.mid_block.attentions.0/prim::
+	<layer id="383" name="__module.encoder.mid_block.attentions.0/prim::ListConstruct_350/Reshape_1" type="Reshape" version="opset1">
 		<data special_zero="false" />
 		<input>
 			<port id="0" precision="I32" />
@@ -6678,7 +6678,7 @@
 			</port>
 		</output>
 	</layer>
-	<layer id="409" name="__module.encoder.mid_block.attentions.0/aten::size/
+	<layer id="409" name="__module.encoder.mid_block.attentions.0/aten::size/ShapeOf_358" type="ShapeOf" version="opset3">
 		<data output_type="i32" />
 		<input>
 			<port id="0" precision="FP32">
@@ -6712,7 +6712,7 @@
 			<port id="3" precision="I32" names="264,274,275" />
 		</output>
 	</layer>
-	<layer id="412" name="
+	<layer id="412" name="Constant_477295" type="Const" version="opset1">
 		<data element_type="i64" shape="1" offset="113409564" size="8" />
 		<output>
 			<port id="0" precision="I64">
@@ -6720,7 +6720,7 @@
 			</port>
 		</output>
 	</layer>
-	<layer id="413" name="__module.encoder.mid_block.attentions.0/prim::
+	<layer id="413" name="__module.encoder.mid_block.attentions.0/prim::ListConstruct_366/Reshape" type="Reshape" version="opset1">
 		<data special_zero="false" />
 		<input>
 			<port id="0" precision="I32" />
@@ -8621,7 +8621,7 @@
 		</output>
 	</layer>
 	<layer id="539" name="aten::randn/RandomUniform" type="RandomUniform" version="opset8">
-		<data output_type="f32" op_seed="
+		<data output_type="f32" op_seed="2258" global_seed="0" />
 		<input>
 			<port id="0" precision="I32">
 				<dim>4</dim>
@@ -8715,7 +8715,7 @@
 		</output>
 	</layer>
 	<layer id="544" name="aten::randn/RandomUniform_508" type="RandomUniform" version="opset8">
-		<data output_type="f32" op_seed="
+		<data output_type="f32" op_seed="9369" global_seed="0" />
 		<input>
 			<port id="0" precision="I32">
 				<dim>4</dim>
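For anyone pulling this revision, the sketch below is one way to confirm that the converted pipeline now resolves to EulerAncestralDiscreteScheduler. It is a minimal example, not part of this commit: it assumes the optimum-intel OpenVINO wrapper is installed, and "YOUR_REPO_ID" is a placeholder for this repository's id on the Hub.

# Minimal check that the OpenVINO pipeline picks up the new scheduler.
# Assumptions: `pip install optimum[openvino]`; YOUR_REPO_ID is a placeholder.
from optimum.intel import OVStableDiffusionPipeline

pipe = OVStableDiffusionPipeline.from_pretrained("YOUR_REPO_ID")

# model_index.json / scheduler/scheduler_config.json should now resolve here.
print(type(pipe.scheduler).__name__)  # expected: EulerAncestralDiscreteScheduler

# Optional smoke test with the ancestral sampler.
image = pipe("a photo of an astronaut riding a horse", num_inference_steps=20).images[0]
image.save("sample.png")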