7767517 23075 23074 pnnx.Input pnnx_input_0 0 1 a.1 #a.1=(1,3,384,384)f32 pnnx.Input pnnx_input_1 0 1 b.1 #b.1=(1,3,384,384)f32 prim::Constant pnnx_3 0 1 144 value=1.000000e+00 pnnx.Attribute pnnx_4 0 1 137 @pnnx_4=(1,3,1,1)f32 #137=(1,3,1,1)f32 prim::Constant pnnx_5 0 1 90 value=1 prim::Constant pnnx_6 0 1 80 value=2 prim::Constant pnnx_7 0 1 84 value=3 prim::Constant pnnx_8 0 1 88 value=8 prim::Constant pnnx_9 0 1 100 value=0 aten::size pnnx_11 2 1 a.1 80 81 #a.1=(1,3,384,384)f32 prim::NumToTensor pnnx_12 1 1 81 h.1 aten::size pnnx_13 2 1 a.1 84 85 #a.1=(1,3,384,384)f32 prim::NumToTensor pnnx_14 1 1 85 w.1 aten::remainder pnnx_15 2 1 h.1 88 89 prim::Constant pnnx_16 0 1 20004 value=8 aten::rsub pnnx_17 3 1 89 20004 90 91 prim::Constant pnnx_18 0 1 20005 value=8 aten::remainder pnnx_19 2 1 91 20005 93 aten::Int pnnx_20 1 1 93 95 prim::Constant pnnx_21 0 1 20006 value=8 aten::remainder pnnx_22 2 1 w.1 20006 97 prim::Constant pnnx_23 0 1 20007 value=8 prim::Constant pnnx_24 0 1 20008 value=1 aten::rsub pnnx_25 3 1 97 20007 20008 99 prim::Constant pnnx_26 0 1 20009 value=8 aten::remainder pnnx_27 2 1 99 20009 102 aten::Int pnnx_28 1 1 102 104 prim::Constant pnnx_29 0 1 20010 value=0 prim::ListConstruct pnnx_30 4 1 100 104 20010 95 106 prim::Constant pnnx_32 0 1 20011 value=2 aten::size pnnx_33 2 1 b.1 20011 111 #b.1=(1,3,384,384)f32 prim::NumToTensor pnnx_34 1 1 111 h0.1 prim::Constant pnnx_35 0 1 20012 value=3 aten::size pnnx_36 2 1 b.1 20012 114 #b.1=(1,3,384,384)f32 prim::NumToTensor pnnx_37 1 1 114 w0.1 prim::Constant pnnx_38 0 1 20013 value=8 aten::remainder pnnx_39 2 1 h0.1 20013 117 prim::Constant pnnx_40 0 1 20014 value=8 prim::Constant pnnx_41 0 1 20015 value=1 aten::rsub pnnx_42 3 1 117 20014 20015 119 prim::Constant pnnx_43 0 1 20016 value=8 aten::remainder pnnx_44 2 1 119 20016 121 aten::Int pnnx_45 1 1 121 123 prim::Constant pnnx_46 0 1 20017 value=8 aten::remainder pnnx_47 2 1 w0.1 20017 125 prim::Constant pnnx_48 0 1 20018 value=8 prim::Constant pnnx_49 0 1 20019 value=1 aten::rsub pnnx_50 3 1 125 20018 20019 127 prim::Constant pnnx_51 0 1 20020 value=8 aten::remainder pnnx_52 2 1 127 20020 129 aten::Int pnnx_53 1 1 129 131 prim::Constant pnnx_54 0 1 20021 value=0 prim::Constant pnnx_55 0 1 20022 value=0 prim::ListConstruct pnnx_56 4 1 20021 131 20022 123 133 F.pad F.pad_0 2 1 a.1 106 a0.1 mode=reflect $input=a.1 $pad=106 #a.1=(1,3,384,384)f32 #a0.1=(1,3,384,384)f32 aten::type_as pnnx_58 2 1 137 a0.1 139 #137=(1,3,1,1)f32 #a0.1=(1,3,384,384)f32 #139=(1,3,1,1)f32 prim::Constant pnnx_59 0 1 20023 value=1 aten::sub pnnx_60 3 1 a0.1 139 20023 143 #a0.1=(1,3,384,384)f32 #139=(1,3,1,1)f32 #143=(1,3,384,384)f32 aten::mul pnnx_61 2 1 143 144 input.7 #143=(1,3,384,384)f32 #input.7=(1,3,384,384)f32 prim::Constant pnnx_62 0 1 20024 value=1 F.pad F.pad_1 2 1 b.1 133 b0.1 mode=reflect $input=b.1 $pad=133 #b.1=(1,3,384,384)f32 #b0.1=(1,3,384,384)f32 aten::sub pnnx_63 3 1 b0.1 139 20024 149 #b0.1=(1,3,384,384)f32 #139=(1,3,1,1)f32 #149=(1,3,384,384)f32 prim::Constant pnnx_64 0 1 20025 value=1.000000e+00 aten::mul pnnx_65 2 1 149 20025 input0.3 #149=(1,3,384,384)f32 #input0.3=(1,3,384,384)f32 nn.Conv2d conv_first 1 1 input.7 154 bias=True dilation=(1,1) groups=1 in_channels=3 kernel_size=(8,8) out_channels=192 padding=(0,0) padding_mode=zeros stride=(8,8) @bias=(192)f32 @weight=(192,3,8,8)f32 #input.7=(1,3,384,384)f32 #154=(1,192,48,48)f32 nn.Conv2d conv_first 1 1 input0.3 157 bias=True dilation=(1,1) groups=1 in_channels=3 kernel_size=(8,8) out_channels=192 padding=(0,0) padding_mode=zeros 
stride=(8,8) @bias=(192)f32 @weight=(192,3,8,8)f32 #input0.3=(1,3,384,384)f32 #157=(1,192,48,48)f32 prim::Constant pnnx_66 0 1 20026 value=2 aten::size pnnx_67 2 1 154 20026 159 #154=(1,192,48,48)f32 prim::NumToTensor pnnx_68 1 1 159 H.1 aten::Int pnnx_69 1 1 H.1 163 aten::Int pnnx_70 1 1 H.1 166 aten::Int pnnx_71 1 1 H.1 169 aten::Int pnnx_72 1 1 H.1 172 aten::Int pnnx_73 1 1 H.1 175 aten::Int pnnx_74 1 1 H.1 178 aten::Int pnnx_75 1 1 H.1 181 aten::Int pnnx_76 1 1 H.1 184 aten::Int pnnx_77 1 1 H.1 187 aten::Int pnnx_78 1 1 H.1 190 aten::Int pnnx_79 1 1 H.1 193 aten::Int pnnx_80 1 1 H.1 196 aten::Int pnnx_81 1 1 H.1 199 aten::Int pnnx_82 1 1 H.1 202 aten::Int pnnx_83 1 1 H.1 205 aten::Int pnnx_84 1 1 H.1 208 aten::Int pnnx_85 1 1 H.1 211 aten::Int pnnx_86 1 1 H.1 214 aten::Int pnnx_87 1 1 H.1 217 aten::Int pnnx_88 1 1 H.1 220 aten::Int pnnx_89 1 1 H.1 223 aten::Int pnnx_90 1 1 H.1 226 aten::Int pnnx_91 1 1 H.1 229 aten::Int pnnx_92 1 1 H.1 232 aten::Int pnnx_93 1 1 H.1 235 aten::Int pnnx_94 1 1 H.1 238 aten::Int pnnx_95 1 1 H.1 241 aten::Int pnnx_96 1 1 H.1 244 aten::Int pnnx_97 1 1 H.1 247 aten::Int pnnx_98 1 1 H.1 250 aten::Int pnnx_99 1 1 H.1 253 aten::Int pnnx_100 1 1 H.1 256 aten::Int pnnx_101 1 1 H.1 259 aten::Int pnnx_102 1 1 H.1 262 aten::Int pnnx_103 1 1 H.1 265 aten::Int pnnx_104 1 1 H.1 268 aten::Int pnnx_105 1 1 H.1 271 aten::Int pnnx_106 1 1 H.1 274 aten::Int pnnx_107 1 1 H.1 277 aten::Int pnnx_108 1 1 H.1 280 aten::Int pnnx_109 1 1 H.1 283 aten::Int pnnx_110 1 1 H.1 286 aten::Int pnnx_111 1 1 H.1 289 aten::Int pnnx_112 1 1 H.1 292 aten::Int pnnx_113 1 1 H.1 295 aten::Int pnnx_114 1 1 H.1 298 aten::Int pnnx_115 1 1 H.1 301 aten::Int pnnx_116 1 1 H.1 304 aten::Int pnnx_117 1 1 H.1 307 aten::Int pnnx_118 1 1 H.1 310 aten::Int pnnx_119 1 1 H.1 313 aten::Int pnnx_120 1 1 H.1 316 aten::Int pnnx_121 1 1 H.1 319 aten::Int pnnx_122 1 1 H.1 322 aten::Int pnnx_123 1 1 H.1 325 aten::Int pnnx_124 1 1 H.1 328 aten::Int pnnx_125 1 1 H.1 331 aten::Int pnnx_126 1 1 H.1 334 aten::Int pnnx_127 1 1 H.1 337 aten::Int pnnx_128 1 1 H.1 340 aten::Int pnnx_129 1 1 H.1 343 aten::Int pnnx_130 1 1 H.1 346 aten::Int pnnx_131 1 1 H.1 349 aten::Int pnnx_132 1 1 H.1 352 aten::Int pnnx_133 1 1 H.1 355 aten::Int pnnx_134 1 1 H.1 358 aten::Int pnnx_135 1 1 H.1 361 aten::Int pnnx_136 1 1 H.1 364 aten::Int pnnx_137 1 1 H.1 367 aten::Int pnnx_138 1 1 H.1 370 aten::Int pnnx_139 1 1 H.1 373 aten::Int pnnx_140 1 1 H.1 376 aten::Int pnnx_141 1 1 H.1 379 aten::Int pnnx_142 1 1 H.1 382 aten::Int pnnx_143 1 1 H.1 385 aten::Int pnnx_144 1 1 H.1 388 aten::Int pnnx_145 1 1 H.1 391 aten::Int pnnx_146 1 1 H.1 394 aten::Int pnnx_147 1 1 H.1 397 prim::Constant pnnx_148 0 1 20027 value=3 aten::size pnnx_149 2 1 154 20027 399 #154=(1,192,48,48)f32 prim::NumToTensor pnnx_150 1 1 399 W.1 aten::Int pnnx_151 1 1 W.1 403 aten::Int pnnx_152 1 1 W.1 406 aten::Int pnnx_153 1 1 W.1 409 aten::Int pnnx_154 1 1 W.1 412 aten::Int pnnx_155 1 1 W.1 415 aten::Int pnnx_156 1 1 W.1 418 aten::Int pnnx_157 1 1 W.1 421 aten::Int pnnx_158 1 1 W.1 424 aten::Int pnnx_159 1 1 W.1 427 aten::Int pnnx_160 1 1 W.1 430 aten::Int pnnx_161 1 1 W.1 433 aten::Int pnnx_162 1 1 W.1 436 aten::Int pnnx_163 1 1 W.1 439 aten::Int pnnx_164 1 1 W.1 442 aten::Int pnnx_165 1 1 W.1 445 aten::Int pnnx_166 1 1 W.1 448 aten::Int pnnx_167 1 1 W.1 451 aten::Int pnnx_168 1 1 W.1 454 aten::Int pnnx_169 1 1 W.1 457 aten::Int pnnx_170 1 1 W.1 460 aten::Int pnnx_171 1 1 W.1 463 aten::Int pnnx_172 1 1 W.1 466 aten::Int pnnx_173 1 1 W.1 469 aten::Int pnnx_174 1 1 W.1 472 aten::Int 
pnnx_175 1 1 W.1 475 aten::Int pnnx_176 1 1 W.1 478 aten::Int pnnx_177 1 1 W.1 481 aten::Int pnnx_178 1 1 W.1 484 aten::Int pnnx_179 1 1 W.1 487 aten::Int pnnx_180 1 1 W.1 490 aten::Int pnnx_181 1 1 W.1 493 aten::Int pnnx_182 1 1 W.1 496 aten::Int pnnx_183 1 1 W.1 499 aten::Int pnnx_184 1 1 W.1 502 aten::Int pnnx_185 1 1 W.1 505 aten::Int pnnx_186 1 1 W.1 508 aten::Int pnnx_187 1 1 W.1 511 aten::Int pnnx_188 1 1 W.1 514 aten::Int pnnx_189 1 1 W.1 517 aten::Int pnnx_190 1 1 W.1 520 aten::Int pnnx_191 1 1 W.1 523 aten::Int pnnx_192 1 1 W.1 526 aten::Int pnnx_193 1 1 W.1 529 aten::Int pnnx_194 1 1 W.1 532 aten::Int pnnx_195 1 1 W.1 535 aten::Int pnnx_196 1 1 W.1 538 aten::Int pnnx_197 1 1 W.1 541 aten::Int pnnx_198 1 1 W.1 544 aten::Int pnnx_199 1 1 W.1 547 aten::Int pnnx_200 1 1 W.1 550 aten::Int pnnx_201 1 1 W.1 553 aten::Int pnnx_202 1 1 W.1 556 aten::Int pnnx_203 1 1 W.1 559 aten::Int pnnx_204 1 1 W.1 562 aten::Int pnnx_205 1 1 W.1 565 aten::Int pnnx_206 1 1 W.1 568 aten::Int pnnx_207 1 1 W.1 571 aten::Int pnnx_208 1 1 W.1 574 aten::Int pnnx_209 1 1 W.1 577 aten::Int pnnx_210 1 1 W.1 580 aten::Int pnnx_211 1 1 W.1 583 aten::Int pnnx_212 1 1 W.1 586 aten::Int pnnx_213 1 1 W.1 589 aten::Int pnnx_214 1 1 W.1 592 aten::Int pnnx_215 1 1 W.1 595 aten::Int pnnx_216 1 1 W.1 598 aten::Int pnnx_217 1 1 W.1 601 aten::Int pnnx_218 1 1 W.1 604 aten::Int pnnx_219 1 1 W.1 607 aten::Int pnnx_220 1 1 W.1 610 aten::Int pnnx_221 1 1 W.1 613 aten::Int pnnx_222 1 1 W.1 616 aten::Int pnnx_223 1 1 W.1 619 aten::Int pnnx_224 1 1 W.1 622 aten::Int pnnx_225 1 1 W.1 625 aten::Int pnnx_226 1 1 W.1 628 aten::Int pnnx_227 1 1 W.1 631 aten::Int pnnx_228 1 1 W.1 634 aten::Int pnnx_229 1 1 W.1 637 prim::Constant pnnx_230 0 1 2244 value=-1 prim::Constant pnnx_231 0 1 2245 value=2 prim::Constant pnnx_232 0 1 2246 value=1 prim::Constant pnnx_234 0 1 20028 value=2 torch.flatten torch.flatten_2183 3 1 154 2245 2244 2248 $input=154 $start_dim=2245 $end_dim=2244 #154=(1,192,48,48)f32 #2248=(1,192,2304)f32 torch.transpose torch.transpose_2960 3 1 2248 2246 20028 input.9 $input=2248 $dim0=2246 $dim1=20028 #2248=(1,192,2304)f32 #input.9=(1,2304,192)f32 nn.LayerNorm patch_embed_dfe.norm 1 1 input.9 2250 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.9=(1,2304,192)f32 #2250=(1,2304,192)f32 nn.Dropout pos_drop_dfe 1 1 2250 642 #2250=(1,2304,192)f32 #642=(1,2304,192)f32 prim::Constant pnnx_236 0 1 2251 value=1 prim::Constant pnnx_237 0 1 2268 value=trunc prim::Constant pnnx_238 0 1 2269 value=8 prim::Constant pnnx_239 0 1 2270 value=0 prim::Constant pnnx_240 0 1 2271 value=2 prim::Constant pnnx_241 0 1 2272 value=1 prim::Constant pnnx_242 0 1 2273 value=3 prim::Constant pnnx_243 0 1 2274 value=8 prim::Constant pnnx_244 0 1 2275 value=4 prim::Constant pnnx_245 0 1 2276 value=5 prim::Constant pnnx_246 0 1 2277 value=-1 prim::Constant pnnx_247 0 1 2278 value=64 aten::size pnnx_248 2 1 642 2270 2284 #642=(1,2304,192)f32 prim::NumToTensor pnnx_249 1 1 2284 B.5 aten::Int pnnx_250 1 1 B.5 2286 aten::Int pnnx_251 1 1 B.5 2287 aten::size pnnx_252 2 1 642 2271 2288 #642=(1,2304,192)f32 prim::NumToTensor pnnx_253 1 1 2288 C.11 aten::Int pnnx_254 1 1 C.11 2290 aten::Int pnnx_255 1 1 C.11 2291 aten::Int pnnx_256 1 1 C.11 2292 aten::Int pnnx_257 1 1 C.11 2293 nn.LayerNorm layers_dfe.0.residual_group.blocks.0.norm1 1 1 642 2294 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #642=(1,2304,192)f32 #2294=(1,2304,192)f32 prim::ListConstruct pnnx_258 4 
1 2287 397 637 2293 2295 prim::Constant pnnx_260 0 1 20029 value=0 Tensor.view Tensor.view_974 2 1 2294 2295 x.3 $input=2294 $shape=2295 #2294=(1,2304,192)f32 #x.3=(1,48,48,192)f32 aten::size pnnx_261 2 1 x.3 20029 2297 #x.3=(1,48,48,192)f32 prim::NumToTensor pnnx_262 1 1 2297 B0.3 aten::Int pnnx_263 1 1 B0.3 2299 aten::size pnnx_264 2 1 x.3 2272 2300 #x.3=(1,48,48,192)f32 prim::NumToTensor pnnx_265 1 1 2300 2301 prim::Constant pnnx_266 0 1 20030 value=2 aten::size pnnx_267 2 1 x.3 20030 2302 #x.3=(1,48,48,192)f32 prim::NumToTensor pnnx_268 1 1 2302 2303 aten::size pnnx_269 2 1 x.3 2273 2304 #x.3=(1,48,48,192)f32 prim::NumToTensor pnnx_270 1 1 2304 C0.3 aten::Int pnnx_271 1 1 C0.3 2306 aten::Int pnnx_272 1 1 C0.3 2307 aten::div pnnx_273 3 1 2301 2269 2268 2308 aten::Int pnnx_274 1 1 2308 2309 prim::Constant pnnx_275 0 1 20031 value=8 prim::Constant pnnx_276 0 1 20032 value=trunc aten::div pnnx_277 3 1 2303 20031 20032 2310 aten::Int pnnx_278 1 1 2310 2311 prim::Constant pnnx_279 0 1 20033 value=8 prim::ListConstruct pnnx_280 6 1 2299 2309 2274 2311 20033 2307 2312 prim::Constant pnnx_282 0 1 20034 value=0 prim::Constant pnnx_283 0 1 20035 value=1 prim::Constant pnnx_284 0 1 20036 value=3 prim::Constant pnnx_285 0 1 20037 value=2 prim::ListConstruct pnnx_286 6 1 20034 20035 20036 20037 2275 2276 2314 Tensor.view Tensor.view_975 2 1 x.3 2312 x0.3 $input=x.3 $shape=2312 #x.3=(1,48,48,192)f32 #x0.3=(1,6,8,6,8,192)f32 prim::Constant pnnx_290 0 1 20039 value=8 prim::Constant pnnx_291 0 1 20040 value=8 prim::ListConstruct pnnx_292 4 1 2277 20039 20040 2306 2317 torch.permute torch.permute_2528 2 1 x0.3 2314 2315 $input=x0.3 $dims=2314 #x0.3=(1,6,8,6,8,192)f32 #2315=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_2 1 1 2315 2316 memory_format=torch.contiguous_format $input=2315 #2315=(1,6,6,8,8,192)f32 #2316=(1,6,6,8,8,192)f32 prim::Constant pnnx_294 0 1 20041 value=-1 prim::ListConstruct pnnx_295 3 1 20041 2278 2292 2319 prim::Constant pnnx_297 0 1 2321 value=1.767767e-01 prim::Constant pnnx_298 0 1 2322 value=trunc prim::Constant pnnx_299 0 1 2323 value=6 prim::Constant pnnx_300 0 1 2324 value=0 prim::Constant pnnx_301 0 1 2325 value=1 prim::Constant pnnx_302 0 1 2326 value=2 prim::Constant pnnx_303 0 1 2327 value=3 prim::Constant pnnx_304 0 1 2328 value=6 prim::Constant pnnx_305 0 1 2329 value=4 prim::Constant pnnx_306 0 1 2330 value=-2 prim::Constant pnnx_307 0 1 2331 value=-1 prim::Constant pnnx_308 0 1 2332 value=64 pnnx.Attribute layers_dfe.0.residual_group.blocks.0.attn 0 1 relative_position_bias_table.3 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.3=(225,6)f32 pnnx.Attribute layers_dfe.0.residual_group.blocks.0.attn 0 1 relative_position_index.3 @relative_position_index=(64,64)i64 #relative_position_index.3=(64,64)i64 Tensor.view Tensor.view_976 2 1 2316 2317 x_windows.3 $input=2316 $shape=2317 #2316=(1,6,6,8,8,192)f32 #x_windows.3=(36,8,8,192)f32 Tensor.view Tensor.view_977 2 1 x_windows.3 2319 x1.3 $input=x_windows.3 $shape=2319 #x_windows.3=(36,8,8,192)f32 #x1.3=(36,64,192)f32 aten::size pnnx_309 2 1 x1.3 2324 2340 #x1.3=(36,64,192)f32 prim::NumToTensor pnnx_310 1 1 2340 B_.3 aten::Int pnnx_311 1 1 B_.3 2342 aten::Int pnnx_312 1 1 B_.3 2343 aten::size pnnx_313 2 1 x1.3 2325 2344 #x1.3=(36,64,192)f32 prim::NumToTensor pnnx_314 1 1 2344 N.3 aten::Int pnnx_315 1 1 N.3 2346 aten::Int pnnx_316 1 1 N.3 2347 aten::size pnnx_317 2 1 x1.3 2326 2348 #x1.3=(36,64,192)f32 prim::NumToTensor pnnx_318 1 1 2348 C.13 aten::Int pnnx_319 1 1 C.13 2350 nn.Linear 
layers_dfe.0.residual_group.blocks.0.attn.qkv 1 1 x1.3 2351 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.3=(36,64,192)f32 #2351=(36,64,576)f32 aten::div pnnx_320 3 1 C.13 2323 2322 2352 aten::Int pnnx_321 1 1 2352 2353 prim::ListConstruct pnnx_322 5 1 2343 2347 2327 2328 2353 2354 prim::Constant pnnx_324 0 1 20042 value=2 prim::Constant pnnx_325 0 1 20043 value=0 prim::Constant pnnx_326 0 1 20044 value=3 prim::Constant pnnx_327 0 1 20045 value=1 prim::ListConstruct pnnx_328 5 1 20042 20043 20044 20045 2329 2356 Tensor.reshape Tensor.reshape_434 2 1 2351 2354 2355 $input=2351 $shape=2354 #2351=(36,64,576)f32 #2355=(36,64,3,6,32)f32 prim::Constant pnnx_330 0 1 20046 value=0 prim::Constant pnnx_331 0 1 20047 value=0 prim::Constant pnnx_333 0 1 20048 value=0 prim::Constant pnnx_334 0 1 20049 value=1 prim::Constant pnnx_336 0 1 20050 value=0 prim::Constant pnnx_337 0 1 20051 value=2 torch.permute torch.permute_2529 2 1 2355 2356 qkv0.3 $input=2355 $dims=2356 #2355=(36,64,3,6,32)f32 #qkv0.3=(3,36,6,64,32)f32 Tensor.select Tensor.select_650 3 1 qkv0.3 20046 20047 q.3 $input=qkv0.3 $dim=20046 $index=20047 #qkv0.3=(3,36,6,64,32)f32 #q.3=(36,6,64,32)f32 aten::mul pnnx_339 2 1 q.3 2321 q0.3 #q.3=(36,6,64,32)f32 #q0.3=(36,6,64,32)f32 Tensor.select Tensor.select_651 3 1 qkv0.3 20048 20049 k.3 $input=qkv0.3 $dim=20048 $index=20049 #qkv0.3=(3,36,6,64,32)f32 #k.3=(36,6,64,32)f32 prim::Constant pnnx_342 0 1 20052 value=-1 prim::ListConstruct pnnx_343 1 1 20052 2364 Tensor.view Tensor.view_978 2 1 relative_position_index.3 2364 2365 $input=relative_position_index.3 $shape=2364 #relative_position_index.3=(64,64)i64 #2365=(4096)i64 prim::ListConstruct pnnx_345 1 1 2365 2366 #2365=(4096)i64 prim::Constant pnnx_347 0 1 20053 value=64 prim::Constant pnnx_348 0 1 20054 value=-1 prim::ListConstruct pnnx_349 3 1 2332 20053 20054 2368 Tensor.index Tensor.index_326 2 1 relative_position_bias_table.3 2366 2367 $input=relative_position_bias_table.3 $expr=2366 #relative_position_bias_table.3=(225,6)f32 #2367=(4096,6)f32 prim::Constant pnnx_351 0 1 20055 value=2 prim::Constant pnnx_352 0 1 20056 value=0 prim::Constant pnnx_353 0 1 20057 value=1 prim::ListConstruct pnnx_354 3 1 20055 20056 20057 2370 Tensor.view Tensor.view_979 2 1 2367 2368 relative_position_bias.3 $input=2367 $shape=2368 #2367=(4096,6)f32 #relative_position_bias.3=(64,64,6)f32 prim::Constant pnnx_358 0 1 20059 value=0 torch.permute torch.permute_2530 2 1 relative_position_bias.3 2370 2371 $input=relative_position_bias.3 $dims=2370 #relative_position_bias.3=(64,64,6)f32 #2371=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_3 1 1 2371 relative_position_bias0.3 memory_format=torch.contiguous_format $input=2371 #2371=(6,64,64)f32 #relative_position_bias0.3=(6,64,64)f32 prim::Constant pnnx_360 0 1 20060 value=1 torch.transpose torch.transpose_2961 3 1 k.3 2330 2331 2362 $input=k.3 $dim0=2330 $dim1=2331 #k.3=(36,6,64,32)f32 #2362=(36,6,32,64)f32 torch.matmul torch.matmul_2204 2 1 q0.3 2362 attn.7 $input=q0.3 $other=2362 #q0.3=(36,6,64,32)f32 #2362=(36,6,32,64)f32 #attn.7=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3218 2 1 relative_position_bias0.3 20059 2373 $input=relative_position_bias0.3 $dim=20059 #relative_position_bias0.3=(6,64,64)f32 #2373=(1,6,64,64)f32 aten::add pnnx_361 3 1 attn.7 2373 20060 input.11 #attn.7=(36,6,64,64)f32 #2373=(1,6,64,64)f32 #input.11=(36,6,64,64)f32 nn.Softmax layers_dfe.0.residual_group.blocks.0.attn.softmax 1 1 input.11 2375 dim=-1 #input.11=(36,6,64,64)f32 #2375=(36,6,64,64)f32 
nn.Dropout layers_dfe.0.residual_group.blocks.0.attn.attn_drop 1 1 2375 2376 #2375=(36,6,64,64)f32 #2376=(36,6,64,64)f32 Tensor.select Tensor.select_652 3 1 qkv0.3 20050 20051 v.3 $input=qkv0.3 $dim=20050 $index=20051 #qkv0.3=(3,36,6,64,32)f32 #v.3=(36,6,64,32)f32 prim::Constant pnnx_363 0 1 20061 value=1 prim::Constant pnnx_364 0 1 20062 value=2 torch.matmul torch.matmul_2205 2 1 2376 v.3 2377 $input=2376 $other=v.3 #2376=(36,6,64,64)f32 #v.3=(36,6,64,32)f32 #2377=(36,6,64,32)f32 prim::ListConstruct pnnx_366 3 1 2342 2346 2350 2379 torch.transpose torch.transpose_2962 3 1 2377 20061 20062 2378 $input=2377 $dim0=20061 $dim1=20062 #2377=(36,6,64,32)f32 #2378=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_435 2 1 2378 2379 input0.5 $input=2378 $shape=2379 #2378=(36,64,6,32)f32 #input0.5=(36,64,192)f32 nn.Linear layers_dfe.0.residual_group.blocks.0.attn.proj 1 1 input0.5 2381 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.5=(36,64,192)f32 #2381=(36,64,192)f32 nn.Dropout layers_dfe.0.residual_group.blocks.0.attn.proj_drop 1 1 2381 2382 #2381=(36,64,192)f32 #2382=(36,64,192)f32 prim::Constant pnnx_368 0 1 20063 value=-1 prim::Constant pnnx_369 0 1 20064 value=8 prim::Constant pnnx_370 0 1 20065 value=8 prim::ListConstruct pnnx_371 4 1 20063 20064 20065 2291 2383 prim::Constant pnnx_373 0 1 20066 value=8 prim::Constant pnnx_374 0 1 20067 value=trunc aten::div pnnx_375 3 1 H.1 20066 20067 2385 aten::Int pnnx_376 1 1 2385 2386 prim::Constant pnnx_377 0 1 20068 value=8 prim::Constant pnnx_378 0 1 20069 value=trunc aten::div pnnx_379 3 1 W.1 20068 20069 2387 aten::Int pnnx_380 1 1 2387 2388 prim::Constant pnnx_381 0 1 20070 value=1 prim::Constant pnnx_382 0 1 20071 value=8 prim::Constant pnnx_383 0 1 20072 value=8 prim::Constant pnnx_384 0 1 20073 value=-1 prim::ListConstruct pnnx_385 6 1 20070 2386 2388 20071 20072 20073 2389 prim::Constant pnnx_387 0 1 20074 value=0 prim::Constant pnnx_388 0 1 20075 value=1 prim::Constant pnnx_389 0 1 20076 value=3 prim::Constant pnnx_390 0 1 20077 value=2 prim::Constant pnnx_391 0 1 20078 value=4 prim::Constant pnnx_392 0 1 20079 value=5 prim::ListConstruct pnnx_393 6 1 20074 20075 20076 20077 20078 20079 2391 Tensor.view Tensor.view_980 2 1 2382 2383 windows.3 $input=2382 $shape=2383 #2382=(36,64,192)f32 #windows.3=(36,8,8,192)f32 Tensor.view Tensor.view_981 2 1 windows.3 2389 x2.3 $input=windows.3 $shape=2389 #windows.3=(36,8,8,192)f32 #x2.3=(1,6,6,8,8,192)f32 prim::Constant pnnx_397 0 1 20081 value=1 prim::Constant pnnx_398 0 1 20082 value=-1 prim::ListConstruct pnnx_399 4 1 20081 394 634 20082 2394 torch.permute torch.permute_2531 2 1 x2.3 2391 2392 $input=x2.3 $dims=2391 #x2.3=(1,6,6,8,8,192)f32 #2392=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_4 1 1 2392 2393 memory_format=torch.contiguous_format $input=2392 #2392=(1,6,8,6,8,192)f32 #2393=(1,6,8,6,8,192)f32 aten::mul pnnx_401 2 1 H.1 W.1 2396 aten::Int pnnx_402 1 1 2396 2397 prim::ListConstruct pnnx_403 3 1 2286 2397 2290 2398 prim::Constant pnnx_405 0 1 2400 value=None prim::Constant pnnx_406 0 1 20083 value=1 Tensor.view Tensor.view_982 2 1 2393 2394 x3.3 $input=2393 $shape=2394 #2393=(1,6,8,6,8,192)f32 #x3.3=(1,48,48,192)f32 Tensor.view Tensor.view_983 2 1 x3.3 2398 x4.3 $input=x3.3 $shape=2398 #x3.3=(1,48,48,192)f32 #x4.3=(1,2304,192)f32 aten::add pnnx_407 3 1 642 x4.3 20083 input.13 #642=(1,2304,192)f32 #x4.3=(1,2304,192)f32 #input.13=(1,2304,192)f32 nn.LayerNorm layers_dfe.0.residual_group.blocks.0.norm2 1 1 input.13 2402 elementwise_affine=True 
eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.13=(1,2304,192)f32 #2402=(1,2304,192)f32 nn.Linear layers_dfe.0.residual_group.blocks.0.mlp.fc1 1 1 2402 2407 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #2402=(1,2304,192)f32 #2407=(1,2304,384)f32 nn.GELU layers_dfe.0.residual_group.blocks.0.mlp.act 1 1 2407 2408 #2407=(1,2304,384)f32 #2408=(1,2304,384)f32 nn.Dropout layers_dfe.0.residual_group.blocks.0.mlp.drop 1 1 2408 2409 #2408=(1,2304,384)f32 #2409=(1,2304,384)f32 nn.Linear layers_dfe.0.residual_group.blocks.0.mlp.fc2 1 1 2409 2410 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #2409=(1,2304,384)f32 #2410=(1,2304,192)f32 nn.Dropout layers_dfe.0.residual_group.blocks.0.mlp.drop 1 1 2410 2411 #2410=(1,2304,192)f32 #2411=(1,2304,192)f32 prim::Constant pnnx_408 0 1 2412 value=None prim::Constant pnnx_409 0 1 20084 value=1 aten::add pnnx_410 3 1 input.13 2411 20084 2413 #input.13=(1,2304,192)f32 #2411=(1,2304,192)f32 #2413=(1,2304,192)f32 prim::Constant pnnx_411 0 1 2414 value=trunc prim::Constant pnnx_412 0 1 2415 value=8 prim::Constant pnnx_413 0 1 2416 value=0 prim::Constant pnnx_414 0 1 2417 value=2 prim::Constant pnnx_415 0 1 2418 value=-4 prim::Constant pnnx_416 0 1 2419 value=1 prim::Constant pnnx_417 0 1 2420 value=3 prim::Constant pnnx_418 0 1 2421 value=8 prim::Constant pnnx_419 0 1 2422 value=4 prim::Constant pnnx_420 0 1 2423 value=5 prim::Constant pnnx_421 0 1 2424 value=-1 prim::Constant pnnx_422 0 1 2425 value=64 pnnx.Attribute layers_dfe.0.residual_group.blocks.1 0 1 attn_mask.3 @attn_mask=(36,64,64)f32 #attn_mask.3=(36,64,64)f32 aten::size pnnx_423 2 1 2413 2416 2432 #2413=(1,2304,192)f32 prim::NumToTensor pnnx_424 1 1 2432 B.7 aten::Int pnnx_425 1 1 B.7 2434 aten::Int pnnx_426 1 1 B.7 2435 aten::size pnnx_427 2 1 2413 2417 2436 #2413=(1,2304,192)f32 prim::NumToTensor pnnx_428 1 1 2436 C.15 aten::Int pnnx_429 1 1 C.15 2438 aten::Int pnnx_430 1 1 C.15 2439 aten::Int pnnx_431 1 1 C.15 2440 aten::Int pnnx_432 1 1 C.15 2441 nn.LayerNorm layers_dfe.0.residual_group.blocks.1.norm1 1 1 2413 2442 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #2413=(1,2304,192)f32 #2442=(1,2304,192)f32 prim::ListConstruct pnnx_433 4 1 2435 391 631 2441 2443 prim::Constant pnnx_435 0 1 20085 value=-4 prim::ListConstruct pnnx_436 2 1 2418 20085 2445 prim::Constant pnnx_437 0 1 20086 value=2 prim::ListConstruct pnnx_438 2 1 2419 20086 2446 Tensor.view Tensor.view_984 2 1 2442 2443 x.5 $input=2442 $shape=2443 #2442=(1,2304,192)f32 #x.5=(1,48,48,192)f32 prim::Constant pnnx_440 0 1 20087 value=0 torch.roll torch.roll_2420 3 1 x.5 2445 2446 x0.5 $input=x.5 $shifts=2445 $dims=2446 #x.5=(1,48,48,192)f32 #x0.5=(1,48,48,192)f32 aten::size pnnx_441 2 1 x0.5 20087 2448 #x0.5=(1,48,48,192)f32 prim::NumToTensor pnnx_442 1 1 2448 B0.5 aten::Int pnnx_443 1 1 B0.5 2450 prim::Constant pnnx_444 0 1 20088 value=1 aten::size pnnx_445 2 1 x0.5 20088 2451 #x0.5=(1,48,48,192)f32 prim::NumToTensor pnnx_446 1 1 2451 2452 prim::Constant pnnx_447 0 1 20089 value=2 aten::size pnnx_448 2 1 x0.5 20089 2453 #x0.5=(1,48,48,192)f32 prim::NumToTensor pnnx_449 1 1 2453 2454 aten::size pnnx_450 2 1 x0.5 2420 2455 #x0.5=(1,48,48,192)f32 prim::NumToTensor pnnx_451 1 1 2455 C0.5 aten::Int pnnx_452 1 1 C0.5 2457 aten::Int pnnx_453 1 1 C0.5 2458 aten::div pnnx_454 3 1 2452 2415 2414 2459 aten::Int pnnx_455 1 1 2459 2460 prim::Constant pnnx_456 0 1 20090 value=8 prim::Constant pnnx_457 0 1 20091 
value=trunc aten::div pnnx_458 3 1 2454 20090 20091 2461 aten::Int pnnx_459 1 1 2461 2462 prim::Constant pnnx_460 0 1 20092 value=8 prim::ListConstruct pnnx_461 6 1 2450 2460 2421 2462 20092 2458 2463 prim::Constant pnnx_463 0 1 20093 value=0 prim::Constant pnnx_464 0 1 20094 value=1 prim::Constant pnnx_465 0 1 20095 value=3 prim::Constant pnnx_466 0 1 20096 value=2 prim::ListConstruct pnnx_467 6 1 20093 20094 20095 20096 2422 2423 2465 Tensor.view Tensor.view_985 2 1 x0.5 2463 x1.5 $input=x0.5 $shape=2463 #x0.5=(1,48,48,192)f32 #x1.5=(1,6,8,6,8,192)f32 prim::Constant pnnx_471 0 1 20098 value=8 prim::Constant pnnx_472 0 1 20099 value=8 prim::ListConstruct pnnx_473 4 1 2424 20098 20099 2457 2468 torch.permute torch.permute_2532 2 1 x1.5 2465 2466 $input=x1.5 $dims=2465 #x1.5=(1,6,8,6,8,192)f32 #2466=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_5 1 1 2466 2467 memory_format=torch.contiguous_format $input=2466 #2466=(1,6,6,8,8,192)f32 #2467=(1,6,6,8,8,192)f32 prim::Constant pnnx_475 0 1 20100 value=-1 prim::ListConstruct pnnx_476 3 1 20100 2425 2440 2470 prim::Constant pnnx_478 0 1 2472 value=1.767767e-01 prim::Constant pnnx_479 0 1 2473 value=trunc prim::Constant pnnx_480 0 1 2474 value=6 prim::Constant pnnx_481 0 1 2475 value=0 prim::Constant pnnx_482 0 1 2476 value=1 prim::Constant pnnx_483 0 1 2477 value=2 prim::Constant pnnx_484 0 1 2478 value=3 prim::Constant pnnx_485 0 1 2479 value=6 prim::Constant pnnx_486 0 1 2480 value=4 prim::Constant pnnx_487 0 1 2481 value=-2 prim::Constant pnnx_488 0 1 2482 value=-1 prim::Constant pnnx_489 0 1 2483 value=64 pnnx.Attribute layers_dfe.0.residual_group.blocks.1.attn 0 1 relative_position_bias_table.5 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.5=(225,6)f32 pnnx.Attribute layers_dfe.0.residual_group.blocks.1.attn 0 1 relative_position_index.5 @relative_position_index=(64,64)i64 #relative_position_index.5=(64,64)i64 Tensor.view Tensor.view_986 2 1 2467 2468 x_windows.5 $input=2467 $shape=2468 #2467=(1,6,6,8,8,192)f32 #x_windows.5=(36,8,8,192)f32 Tensor.view Tensor.view_987 2 1 x_windows.5 2470 x2.5 $input=x_windows.5 $shape=2470 #x_windows.5=(36,8,8,192)f32 #x2.5=(36,64,192)f32 aten::size pnnx_490 2 1 x2.5 2475 2491 #x2.5=(36,64,192)f32 prim::NumToTensor pnnx_491 1 1 2491 B_.5 aten::Int pnnx_492 1 1 B_.5 2493 aten::Int pnnx_493 1 1 B_.5 2494 aten::size pnnx_494 2 1 x2.5 2476 2495 #x2.5=(36,64,192)f32 prim::NumToTensor pnnx_495 1 1 2495 N.5 aten::Int pnnx_496 1 1 N.5 2497 aten::Int pnnx_497 1 1 N.5 2498 aten::Int pnnx_498 1 1 N.5 2499 aten::Int pnnx_499 1 1 N.5 2500 aten::Int pnnx_500 1 1 N.5 2501 aten::Int pnnx_501 1 1 N.5 2502 aten::size pnnx_502 2 1 x2.5 2477 2503 #x2.5=(36,64,192)f32 prim::NumToTensor pnnx_503 1 1 2503 C.17 aten::Int pnnx_504 1 1 C.17 2505 nn.Linear layers_dfe.0.residual_group.blocks.1.attn.qkv 1 1 x2.5 2506 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.5=(36,64,192)f32 #2506=(36,64,576)f32 aten::div pnnx_505 3 1 C.17 2474 2473 2507 aten::Int pnnx_506 1 1 2507 2508 prim::ListConstruct pnnx_507 5 1 2494 2502 2478 2479 2508 2509 prim::Constant pnnx_509 0 1 20101 value=2 prim::Constant pnnx_510 0 1 20102 value=0 prim::Constant pnnx_511 0 1 20103 value=3 prim::Constant pnnx_512 0 1 20104 value=1 prim::ListConstruct pnnx_513 5 1 20101 20102 20103 20104 2480 2511 Tensor.reshape Tensor.reshape_436 2 1 2506 2509 2510 $input=2506 $shape=2509 #2506=(36,64,576)f32 #2510=(36,64,3,6,32)f32 prim::Constant pnnx_515 0 1 20105 value=0 prim::Constant pnnx_516 0 1 20106 
value=0 prim::Constant pnnx_518 0 1 20107 value=0 prim::Constant pnnx_519 0 1 20108 value=1 prim::Constant pnnx_521 0 1 20109 value=0 prim::Constant pnnx_522 0 1 20110 value=2 torch.permute torch.permute_2533 2 1 2510 2511 qkv0.5 $input=2510 $dims=2511 #2510=(36,64,3,6,32)f32 #qkv0.5=(3,36,6,64,32)f32 Tensor.select Tensor.select_653 3 1 qkv0.5 20105 20106 q.5 $input=qkv0.5 $dim=20105 $index=20106 #qkv0.5=(3,36,6,64,32)f32 #q.5=(36,6,64,32)f32 aten::mul pnnx_524 2 1 q.5 2472 q0.5 #q.5=(36,6,64,32)f32 #q0.5=(36,6,64,32)f32 Tensor.select Tensor.select_654 3 1 qkv0.5 20107 20108 k.5 $input=qkv0.5 $dim=20107 $index=20108 #qkv0.5=(3,36,6,64,32)f32 #k.5=(36,6,64,32)f32 prim::Constant pnnx_527 0 1 20111 value=-1 prim::ListConstruct pnnx_528 1 1 20111 2519 Tensor.view Tensor.view_988 2 1 relative_position_index.5 2519 2520 $input=relative_position_index.5 $shape=2519 #relative_position_index.5=(64,64)i64 #2520=(4096)i64 prim::ListConstruct pnnx_530 1 1 2520 2521 #2520=(4096)i64 prim::Constant pnnx_532 0 1 20112 value=64 prim::Constant pnnx_533 0 1 20113 value=-1 prim::ListConstruct pnnx_534 3 1 2483 20112 20113 2523 Tensor.index Tensor.index_327 2 1 relative_position_bias_table.5 2521 2522 $input=relative_position_bias_table.5 $expr=2521 #relative_position_bias_table.5=(225,6)f32 #2522=(4096,6)f32 prim::Constant pnnx_536 0 1 20114 value=2 prim::Constant pnnx_537 0 1 20115 value=0 prim::Constant pnnx_538 0 1 20116 value=1 prim::ListConstruct pnnx_539 3 1 20114 20115 20116 2525 Tensor.view Tensor.view_989 2 1 2522 2523 relative_position_bias.5 $input=2522 $shape=2523 #2522=(4096,6)f32 #relative_position_bias.5=(64,64,6)f32 prim::Constant pnnx_543 0 1 20118 value=0 torch.permute torch.permute_2534 2 1 relative_position_bias.5 2525 2526 $input=relative_position_bias.5 $dims=2525 #relative_position_bias.5=(64,64,6)f32 #2526=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_6 1 1 2526 relative_position_bias0.5 memory_format=torch.contiguous_format $input=2526 #2526=(6,64,64)f32 #relative_position_bias0.5=(6,64,64)f32 prim::Constant pnnx_545 0 1 20119 value=1 torch.transpose torch.transpose_2963 3 1 k.5 2481 2482 2517 $input=k.5 $dim0=2481 $dim1=2482 #k.5=(36,6,64,32)f32 #2517=(36,6,32,64)f32 torch.matmul torch.matmul_2206 2 1 q0.5 2517 attn.11 $input=q0.5 $other=2517 #q0.5=(36,6,64,32)f32 #2517=(36,6,32,64)f32 #attn.11=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3219 2 1 relative_position_bias0.5 20118 2528 $input=relative_position_bias0.5 $dim=20118 #relative_position_bias0.5=(6,64,64)f32 #2528=(1,6,64,64)f32 aten::add pnnx_546 3 1 attn.11 2528 20119 attn0.3 #attn.11=(36,6,64,64)f32 #2528=(1,6,64,64)f32 #attn0.3=(36,6,64,64)f32 prim::Constant pnnx_547 0 1 20120 value=0 aten::size pnnx_548 2 1 attn_mask.3 20120 2530 #attn_mask.3=(36,64,64)f32 prim::NumToTensor pnnx_549 1 1 2530 other.3 aten::Int pnnx_550 1 1 other.3 2532 prim::Constant pnnx_551 0 1 20121 value=trunc aten::div pnnx_552 3 1 B_.5 other.3 20121 2533 aten::Int pnnx_553 1 1 2533 2534 prim::Constant pnnx_554 0 1 20122 value=6 prim::ListConstruct pnnx_555 5 1 2534 2532 20122 2501 2500 2535 prim::Constant pnnx_557 0 1 20123 value=1 prim::Constant pnnx_559 0 1 20124 value=0 prim::Constant pnnx_561 0 1 20125 value=1 Tensor.view Tensor.view_990 2 1 attn0.3 2535 2536 $input=attn0.3 $shape=2535 #attn0.3=(36,6,64,64)f32 #2536=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3220 2 1 attn_mask.3 20123 2537 $input=attn_mask.3 $dim=20123 #attn_mask.3=(36,64,64)f32 #2537=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3221 2 1 2537 20124 2538 
$input=2537 $dim=20124 #2537=(36,1,64,64)f32 #2538=(1,36,1,64,64)f32 aten::add pnnx_562 3 1 2536 2538 20125 attn1.3 #2536=(1,36,6,64,64)f32 #2538=(1,36,1,64,64)f32 #attn1.3=(1,36,6,64,64)f32 prim::Constant pnnx_563 0 1 20126 value=-1 prim::Constant pnnx_564 0 1 20127 value=6 prim::ListConstruct pnnx_565 4 1 20126 20127 2499 2498 2540 Tensor.view Tensor.view_991 2 1 attn1.3 2540 input.15 $input=attn1.3 $shape=2540 #attn1.3=(1,36,6,64,64)f32 #input.15=(36,6,64,64)f32 nn.Softmax layers_dfe.0.residual_group.blocks.1.attn.softmax 1 1 input.15 2542 dim=-1 #input.15=(36,6,64,64)f32 #2542=(36,6,64,64)f32 nn.Dropout layers_dfe.0.residual_group.blocks.1.attn.attn_drop 1 1 2542 2543 #2542=(36,6,64,64)f32 #2543=(36,6,64,64)f32 Tensor.select Tensor.select_655 3 1 qkv0.5 20109 20110 v.5 $input=qkv0.5 $dim=20109 $index=20110 #qkv0.5=(3,36,6,64,32)f32 #v.5=(36,6,64,32)f32 prim::Constant pnnx_568 0 1 20128 value=1 prim::Constant pnnx_569 0 1 20129 value=2 torch.matmul torch.matmul_2207 2 1 2543 v.5 2544 $input=2543 $other=v.5 #2543=(36,6,64,64)f32 #v.5=(36,6,64,32)f32 #2544=(36,6,64,32)f32 prim::ListConstruct pnnx_571 3 1 2493 2497 2505 2546 torch.transpose torch.transpose_2964 3 1 2544 20128 20129 2545 $input=2544 $dim0=20128 $dim1=20129 #2544=(36,6,64,32)f32 #2545=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_437 2 1 2545 2546 input0.7 $input=2545 $shape=2546 #2545=(36,64,6,32)f32 #input0.7=(36,64,192)f32 nn.Linear layers_dfe.0.residual_group.blocks.1.attn.proj 1 1 input0.7 2548 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.7=(36,64,192)f32 #2548=(36,64,192)f32 nn.Dropout layers_dfe.0.residual_group.blocks.1.attn.proj_drop 1 1 2548 2549 #2548=(36,64,192)f32 #2549=(36,64,192)f32 prim::Constant pnnx_573 0 1 20130 value=-1 prim::Constant pnnx_574 0 1 20131 value=8 prim::Constant pnnx_575 0 1 20132 value=8 prim::ListConstruct pnnx_576 4 1 20130 20131 20132 2439 2550 prim::Constant pnnx_578 0 1 20133 value=8 prim::Constant pnnx_579 0 1 20134 value=trunc aten::div pnnx_580 3 1 H.1 20133 20134 2552 aten::Int pnnx_581 1 1 2552 2553 prim::Constant pnnx_582 0 1 20135 value=8 prim::Constant pnnx_583 0 1 20136 value=trunc aten::div pnnx_584 3 1 W.1 20135 20136 2554 aten::Int pnnx_585 1 1 2554 2555 prim::Constant pnnx_586 0 1 20137 value=1 prim::Constant pnnx_587 0 1 20138 value=8 prim::Constant pnnx_588 0 1 20139 value=8 prim::Constant pnnx_589 0 1 20140 value=-1 prim::ListConstruct pnnx_590 6 1 20137 2553 2555 20138 20139 20140 2556 prim::Constant pnnx_592 0 1 20141 value=0 prim::Constant pnnx_593 0 1 20142 value=1 prim::Constant pnnx_594 0 1 20143 value=3 prim::Constant pnnx_595 0 1 20144 value=2 prim::Constant pnnx_596 0 1 20145 value=4 prim::Constant pnnx_597 0 1 20146 value=5 prim::ListConstruct pnnx_598 6 1 20141 20142 20143 20144 20145 20146 2558 Tensor.view Tensor.view_992 2 1 2549 2550 windows.5 $input=2549 $shape=2550 #2549=(36,64,192)f32 #windows.5=(36,8,8,192)f32 Tensor.view Tensor.view_993 2 1 windows.5 2556 x3.5 $input=windows.5 $shape=2556 #windows.5=(36,8,8,192)f32 #x3.5=(1,6,6,8,8,192)f32 prim::Constant pnnx_602 0 1 20148 value=1 prim::Constant pnnx_603 0 1 20149 value=-1 prim::ListConstruct pnnx_604 4 1 20148 388 628 20149 2561 torch.permute torch.permute_2535 2 1 x3.5 2558 2559 $input=x3.5 $dims=2558 #x3.5=(1,6,6,8,8,192)f32 #2559=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_7 1 1 2559 2560 memory_format=torch.contiguous_format $input=2559 #2559=(1,6,8,6,8,192)f32 #2560=(1,6,8,6,8,192)f32 prim::Constant pnnx_606 0 1 20150 value=4 prim::Constant 
pnnx_607 0 1 20151 value=4 prim::ListConstruct pnnx_608 2 1 20150 20151 2563 prim::Constant pnnx_609 0 1 20152 value=1 prim::Constant pnnx_610 0 1 20153 value=2 prim::ListConstruct pnnx_611 2 1 20152 20153 2564 Tensor.view Tensor.view_994 2 1 2560 2561 shifted_x.3 $input=2560 $shape=2561 #2560=(1,6,8,6,8,192)f32 #shifted_x.3=(1,48,48,192)f32 aten::mul pnnx_613 2 1 H.1 W.1 2566 aten::Int pnnx_614 1 1 2566 2567 prim::ListConstruct pnnx_615 3 1 2434 2567 2438 2568 prim::Constant pnnx_617 0 1 2570 value=None prim::Constant pnnx_618 0 1 20154 value=1 torch.roll torch.roll_2421 3 1 shifted_x.3 2563 2564 x4.5 $input=shifted_x.3 $shifts=2563 $dims=2564 #shifted_x.3=(1,48,48,192)f32 #x4.5=(1,48,48,192)f32 Tensor.view Tensor.view_995 2 1 x4.5 2568 x5.3 $input=x4.5 $shape=2568 #x4.5=(1,48,48,192)f32 #x5.3=(1,2304,192)f32 aten::add pnnx_619 3 1 2413 x5.3 20154 input.17 #2413=(1,2304,192)f32 #x5.3=(1,2304,192)f32 #input.17=(1,2304,192)f32 nn.LayerNorm layers_dfe.0.residual_group.blocks.1.norm2 1 1 input.17 2572 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.17=(1,2304,192)f32 #2572=(1,2304,192)f32 nn.Linear layers_dfe.0.residual_group.blocks.1.mlp.fc1 1 1 2572 2577 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #2572=(1,2304,192)f32 #2577=(1,2304,384)f32 nn.GELU layers_dfe.0.residual_group.blocks.1.mlp.act 1 1 2577 2578 #2577=(1,2304,384)f32 #2578=(1,2304,384)f32 nn.Dropout layers_dfe.0.residual_group.blocks.1.mlp.drop 1 1 2578 2579 #2578=(1,2304,384)f32 #2579=(1,2304,384)f32 nn.Linear layers_dfe.0.residual_group.blocks.1.mlp.fc2 1 1 2579 2580 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #2579=(1,2304,384)f32 #2580=(1,2304,192)f32 nn.Dropout layers_dfe.0.residual_group.blocks.1.mlp.drop 1 1 2580 2581 #2580=(1,2304,192)f32 #2581=(1,2304,192)f32 prim::Constant pnnx_620 0 1 2582 value=None prim::Constant pnnx_621 0 1 20155 value=1 aten::add pnnx_622 3 1 input.17 2581 20155 2583 #input.17=(1,2304,192)f32 #2581=(1,2304,192)f32 #2583=(1,2304,192)f32 prim::Constant pnnx_623 0 1 2584 value=trunc prim::Constant pnnx_624 0 1 2585 value=8 prim::Constant pnnx_625 0 1 2586 value=0 prim::Constant pnnx_626 0 1 2587 value=2 prim::Constant pnnx_627 0 1 2588 value=1 prim::Constant pnnx_628 0 1 2589 value=3 prim::Constant pnnx_629 0 1 2590 value=8 prim::Constant pnnx_630 0 1 2591 value=4 prim::Constant pnnx_631 0 1 2592 value=5 prim::Constant pnnx_632 0 1 2593 value=-1 prim::Constant pnnx_633 0 1 2594 value=64 aten::size pnnx_634 2 1 2583 2586 2600 #2583=(1,2304,192)f32 prim::NumToTensor pnnx_635 1 1 2600 B.9 aten::Int pnnx_636 1 1 B.9 2602 aten::Int pnnx_637 1 1 B.9 2603 aten::size pnnx_638 2 1 2583 2587 2604 #2583=(1,2304,192)f32 prim::NumToTensor pnnx_639 1 1 2604 C.19 aten::Int pnnx_640 1 1 C.19 2606 aten::Int pnnx_641 1 1 C.19 2607 aten::Int pnnx_642 1 1 C.19 2608 aten::Int pnnx_643 1 1 C.19 2609 nn.LayerNorm layers_dfe.0.residual_group.blocks.2.norm1 1 1 2583 2610 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #2583=(1,2304,192)f32 #2610=(1,2304,192)f32 prim::ListConstruct pnnx_644 4 1 2603 385 625 2609 2611 prim::Constant pnnx_646 0 1 20156 value=0 Tensor.view Tensor.view_996 2 1 2610 2611 x.7 $input=2610 $shape=2611 #2610=(1,2304,192)f32 #x.7=(1,48,48,192)f32 aten::size pnnx_647 2 1 x.7 20156 2613 #x.7=(1,48,48,192)f32 prim::NumToTensor pnnx_648 1 1 2613 B0.7 aten::Int pnnx_649 1 1 B0.7 2615 aten::size pnnx_650 2 1 x.7 2588 2616 
#x.7=(1,48,48,192)f32 prim::NumToTensor pnnx_651 1 1 2616 2617 prim::Constant pnnx_652 0 1 20157 value=2 aten::size pnnx_653 2 1 x.7 20157 2618 #x.7=(1,48,48,192)f32 prim::NumToTensor pnnx_654 1 1 2618 2619 aten::size pnnx_655 2 1 x.7 2589 2620 #x.7=(1,48,48,192)f32 prim::NumToTensor pnnx_656 1 1 2620 C0.7 aten::Int pnnx_657 1 1 C0.7 2622 aten::Int pnnx_658 1 1 C0.7 2623 aten::div pnnx_659 3 1 2617 2585 2584 2624 aten::Int pnnx_660 1 1 2624 2625 prim::Constant pnnx_661 0 1 20158 value=8 prim::Constant pnnx_662 0 1 20159 value=trunc aten::div pnnx_663 3 1 2619 20158 20159 2626 aten::Int pnnx_664 1 1 2626 2627 prim::Constant pnnx_665 0 1 20160 value=8 prim::ListConstruct pnnx_666 6 1 2615 2625 2590 2627 20160 2623 2628 prim::Constant pnnx_668 0 1 20161 value=0 prim::Constant pnnx_669 0 1 20162 value=1 prim::Constant pnnx_670 0 1 20163 value=3 prim::Constant pnnx_671 0 1 20164 value=2 prim::ListConstruct pnnx_672 6 1 20161 20162 20163 20164 2591 2592 2630 Tensor.view Tensor.view_997 2 1 x.7 2628 x0.7 $input=x.7 $shape=2628 #x.7=(1,48,48,192)f32 #x0.7=(1,6,8,6,8,192)f32 prim::Constant pnnx_676 0 1 20166 value=8 prim::Constant pnnx_677 0 1 20167 value=8 prim::ListConstruct pnnx_678 4 1 2593 20166 20167 2622 2633 torch.permute torch.permute_2536 2 1 x0.7 2630 2631 $input=x0.7 $dims=2630 #x0.7=(1,6,8,6,8,192)f32 #2631=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_8 1 1 2631 2632 memory_format=torch.contiguous_format $input=2631 #2631=(1,6,6,8,8,192)f32 #2632=(1,6,6,8,8,192)f32 prim::Constant pnnx_680 0 1 20168 value=-1 prim::ListConstruct pnnx_681 3 1 20168 2594 2608 2635 prim::Constant pnnx_683 0 1 2637 value=1.767767e-01 prim::Constant pnnx_684 0 1 2638 value=trunc prim::Constant pnnx_685 0 1 2639 value=6 prim::Constant pnnx_686 0 1 2640 value=0 prim::Constant pnnx_687 0 1 2641 value=1 prim::Constant pnnx_688 0 1 2642 value=2 prim::Constant pnnx_689 0 1 2643 value=3 prim::Constant pnnx_690 0 1 2644 value=6 prim::Constant pnnx_691 0 1 2645 value=4 prim::Constant pnnx_692 0 1 2646 value=-2 prim::Constant pnnx_693 0 1 2647 value=-1 prim::Constant pnnx_694 0 1 2648 value=64 pnnx.Attribute layers_dfe.0.residual_group.blocks.2.attn 0 1 relative_position_bias_table.7 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.7=(225,6)f32 pnnx.Attribute layers_dfe.0.residual_group.blocks.2.attn 0 1 relative_position_index.7 @relative_position_index=(64,64)i64 #relative_position_index.7=(64,64)i64 Tensor.view Tensor.view_998 2 1 2632 2633 x_windows.7 $input=2632 $shape=2633 #2632=(1,6,6,8,8,192)f32 #x_windows.7=(36,8,8,192)f32 Tensor.view Tensor.view_999 2 1 x_windows.7 2635 x1.7 $input=x_windows.7 $shape=2635 #x_windows.7=(36,8,8,192)f32 #x1.7=(36,64,192)f32 aten::size pnnx_695 2 1 x1.7 2640 2656 #x1.7=(36,64,192)f32 prim::NumToTensor pnnx_696 1 1 2656 B_.7 aten::Int pnnx_697 1 1 B_.7 2658 aten::Int pnnx_698 1 1 B_.7 2659 aten::size pnnx_699 2 1 x1.7 2641 2660 #x1.7=(36,64,192)f32 prim::NumToTensor pnnx_700 1 1 2660 N.7 aten::Int pnnx_701 1 1 N.7 2662 aten::Int pnnx_702 1 1 N.7 2663 aten::size pnnx_703 2 1 x1.7 2642 2664 #x1.7=(36,64,192)f32 prim::NumToTensor pnnx_704 1 1 2664 C.21 aten::Int pnnx_705 1 1 C.21 2666 nn.Linear layers_dfe.0.residual_group.blocks.2.attn.qkv 1 1 x1.7 2667 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.7=(36,64,192)f32 #2667=(36,64,576)f32 aten::div pnnx_706 3 1 C.21 2639 2638 2668 aten::Int pnnx_707 1 1 2668 2669 prim::ListConstruct pnnx_708 5 1 2659 2663 2643 2644 2669 2670 prim::Constant pnnx_710 0 1 20169 value=2 
prim::Constant pnnx_711 0 1 20170 value=0 prim::Constant pnnx_712 0 1 20171 value=3 prim::Constant pnnx_713 0 1 20172 value=1 prim::ListConstruct pnnx_714 5 1 20169 20170 20171 20172 2645 2672 Tensor.reshape Tensor.reshape_438 2 1 2667 2670 2671 $input=2667 $shape=2670 #2667=(36,64,576)f32 #2671=(36,64,3,6,32)f32 prim::Constant pnnx_716 0 1 20173 value=0 prim::Constant pnnx_717 0 1 20174 value=0 prim::Constant pnnx_719 0 1 20175 value=0 prim::Constant pnnx_720 0 1 20176 value=1 prim::Constant pnnx_722 0 1 20177 value=0 prim::Constant pnnx_723 0 1 20178 value=2 torch.permute torch.permute_2537 2 1 2671 2672 qkv0.7 $input=2671 $dims=2672 #2671=(36,64,3,6,32)f32 #qkv0.7=(3,36,6,64,32)f32 Tensor.select Tensor.select_656 3 1 qkv0.7 20173 20174 q.7 $input=qkv0.7 $dim=20173 $index=20174 #qkv0.7=(3,36,6,64,32)f32 #q.7=(36,6,64,32)f32 aten::mul pnnx_725 2 1 q.7 2637 q0.7 #q.7=(36,6,64,32)f32 #q0.7=(36,6,64,32)f32 Tensor.select Tensor.select_657 3 1 qkv0.7 20175 20176 k.7 $input=qkv0.7 $dim=20175 $index=20176 #qkv0.7=(3,36,6,64,32)f32 #k.7=(36,6,64,32)f32 prim::Constant pnnx_728 0 1 20179 value=-1 prim::ListConstruct pnnx_729 1 1 20179 2680 Tensor.view Tensor.view_1000 2 1 relative_position_index.7 2680 2681 $input=relative_position_index.7 $shape=2680 #relative_position_index.7=(64,64)i64 #2681=(4096)i64 prim::ListConstruct pnnx_731 1 1 2681 2682 #2681=(4096)i64 prim::Constant pnnx_733 0 1 20180 value=64 prim::Constant pnnx_734 0 1 20181 value=-1 prim::ListConstruct pnnx_735 3 1 2648 20180 20181 2684 Tensor.index Tensor.index_328 2 1 relative_position_bias_table.7 2682 2683 $input=relative_position_bias_table.7 $expr=2682 #relative_position_bias_table.7=(225,6)f32 #2683=(4096,6)f32 prim::Constant pnnx_737 0 1 20182 value=2 prim::Constant pnnx_738 0 1 20183 value=0 prim::Constant pnnx_739 0 1 20184 value=1 prim::ListConstruct pnnx_740 3 1 20182 20183 20184 2686 Tensor.view Tensor.view_1001 2 1 2683 2684 relative_position_bias.7 $input=2683 $shape=2684 #2683=(4096,6)f32 #relative_position_bias.7=(64,64,6)f32 prim::Constant pnnx_744 0 1 20186 value=0 torch.permute torch.permute_2538 2 1 relative_position_bias.7 2686 2687 $input=relative_position_bias.7 $dims=2686 #relative_position_bias.7=(64,64,6)f32 #2687=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_9 1 1 2687 relative_position_bias0.7 memory_format=torch.contiguous_format $input=2687 #2687=(6,64,64)f32 #relative_position_bias0.7=(6,64,64)f32 prim::Constant pnnx_746 0 1 20187 value=1 torch.transpose torch.transpose_2965 3 1 k.7 2646 2647 2678 $input=k.7 $dim0=2646 $dim1=2647 #k.7=(36,6,64,32)f32 #2678=(36,6,32,64)f32 torch.matmul torch.matmul_2208 2 1 q0.7 2678 attn.15 $input=q0.7 $other=2678 #q0.7=(36,6,64,32)f32 #2678=(36,6,32,64)f32 #attn.15=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3222 2 1 relative_position_bias0.7 20186 2689 $input=relative_position_bias0.7 $dim=20186 #relative_position_bias0.7=(6,64,64)f32 #2689=(1,6,64,64)f32 aten::add pnnx_747 3 1 attn.15 2689 20187 input.19 #attn.15=(36,6,64,64)f32 #2689=(1,6,64,64)f32 #input.19=(36,6,64,64)f32 nn.Softmax layers_dfe.0.residual_group.blocks.2.attn.softmax 1 1 input.19 2691 dim=-1 #input.19=(36,6,64,64)f32 #2691=(36,6,64,64)f32 nn.Dropout layers_dfe.0.residual_group.blocks.2.attn.attn_drop 1 1 2691 2692 #2691=(36,6,64,64)f32 #2692=(36,6,64,64)f32 Tensor.select Tensor.select_658 3 1 qkv0.7 20177 20178 v.7 $input=qkv0.7 $dim=20177 $index=20178 #qkv0.7=(3,36,6,64,32)f32 #v.7=(36,6,64,32)f32 prim::Constant pnnx_749 0 1 20188 value=1 prim::Constant pnnx_750 0 1 20189 value=2 
torch.matmul torch.matmul_2209 2 1 2692 v.7 2693 $input=2692 $other=v.7 #2692=(36,6,64,64)f32 #v.7=(36,6,64,32)f32 #2693=(36,6,64,32)f32 prim::ListConstruct pnnx_752 3 1 2658 2662 2666 2695 torch.transpose torch.transpose_2966 3 1 2693 20188 20189 2694 $input=2693 $dim0=20188 $dim1=20189 #2693=(36,6,64,32)f32 #2694=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_439 2 1 2694 2695 input0.9 $input=2694 $shape=2695 #2694=(36,64,6,32)f32 #input0.9=(36,64,192)f32 nn.Linear layers_dfe.0.residual_group.blocks.2.attn.proj 1 1 input0.9 2697 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.9=(36,64,192)f32 #2697=(36,64,192)f32 nn.Dropout layers_dfe.0.residual_group.blocks.2.attn.proj_drop 1 1 2697 2698 #2697=(36,64,192)f32 #2698=(36,64,192)f32 prim::Constant pnnx_754 0 1 20190 value=-1 prim::Constant pnnx_755 0 1 20191 value=8 prim::Constant pnnx_756 0 1 20192 value=8 prim::ListConstruct pnnx_757 4 1 20190 20191 20192 2607 2699 prim::Constant pnnx_759 0 1 20193 value=8 prim::Constant pnnx_760 0 1 20194 value=trunc aten::div pnnx_761 3 1 H.1 20193 20194 2701 aten::Int pnnx_762 1 1 2701 2702 prim::Constant pnnx_763 0 1 20195 value=8 prim::Constant pnnx_764 0 1 20196 value=trunc aten::div pnnx_765 3 1 W.1 20195 20196 2703 aten::Int pnnx_766 1 1 2703 2704 prim::Constant pnnx_767 0 1 20197 value=1 prim::Constant pnnx_768 0 1 20198 value=8 prim::Constant pnnx_769 0 1 20199 value=8 prim::Constant pnnx_770 0 1 20200 value=-1 prim::ListConstruct pnnx_771 6 1 20197 2702 2704 20198 20199 20200 2705 prim::Constant pnnx_773 0 1 20201 value=0 prim::Constant pnnx_774 0 1 20202 value=1 prim::Constant pnnx_775 0 1 20203 value=3 prim::Constant pnnx_776 0 1 20204 value=2 prim::Constant pnnx_777 0 1 20205 value=4 prim::Constant pnnx_778 0 1 20206 value=5 prim::ListConstruct pnnx_779 6 1 20201 20202 20203 20204 20205 20206 2707 Tensor.view Tensor.view_1002 2 1 2698 2699 windows.7 $input=2698 $shape=2699 #2698=(36,64,192)f32 #windows.7=(36,8,8,192)f32 Tensor.view Tensor.view_1003 2 1 windows.7 2705 x2.7 $input=windows.7 $shape=2705 #windows.7=(36,8,8,192)f32 #x2.7=(1,6,6,8,8,192)f32 prim::Constant pnnx_783 0 1 20208 value=1 prim::Constant pnnx_784 0 1 20209 value=-1 prim::ListConstruct pnnx_785 4 1 20208 382 622 20209 2710 torch.permute torch.permute_2539 2 1 x2.7 2707 2708 $input=x2.7 $dims=2707 #x2.7=(1,6,6,8,8,192)f32 #2708=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_10 1 1 2708 2709 memory_format=torch.contiguous_format $input=2708 #2708=(1,6,8,6,8,192)f32 #2709=(1,6,8,6,8,192)f32 aten::mul pnnx_787 2 1 H.1 W.1 2712 aten::Int pnnx_788 1 1 2712 2713 prim::ListConstruct pnnx_789 3 1 2602 2713 2606 2714 prim::Constant pnnx_791 0 1 2716 value=None prim::Constant pnnx_792 0 1 20210 value=1 Tensor.view Tensor.view_1004 2 1 2709 2710 x3.7 $input=2709 $shape=2710 #2709=(1,6,8,6,8,192)f32 #x3.7=(1,48,48,192)f32 Tensor.view Tensor.view_1005 2 1 x3.7 2714 x4.7 $input=x3.7 $shape=2714 #x3.7=(1,48,48,192)f32 #x4.7=(1,2304,192)f32 aten::add pnnx_793 3 1 2583 x4.7 20210 input.21 #2583=(1,2304,192)f32 #x4.7=(1,2304,192)f32 #input.21=(1,2304,192)f32 nn.LayerNorm layers_dfe.0.residual_group.blocks.2.norm2 1 1 input.21 2718 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.21=(1,2304,192)f32 #2718=(1,2304,192)f32 nn.Linear layers_dfe.0.residual_group.blocks.2.mlp.fc1 1 1 2718 2723 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #2718=(1,2304,192)f32 #2723=(1,2304,384)f32 nn.GELU 
layers_dfe.0.residual_group.blocks.2.mlp.act 1 1 2723 2724 #2723=(1,2304,384)f32 #2724=(1,2304,384)f32 nn.Dropout layers_dfe.0.residual_group.blocks.2.mlp.drop 1 1 2724 2725 #2724=(1,2304,384)f32 #2725=(1,2304,384)f32 nn.Linear layers_dfe.0.residual_group.blocks.2.mlp.fc2 1 1 2725 2726 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #2725=(1,2304,384)f32 #2726=(1,2304,192)f32 nn.Dropout layers_dfe.0.residual_group.blocks.2.mlp.drop 1 1 2726 2727 #2726=(1,2304,192)f32 #2727=(1,2304,192)f32 prim::Constant pnnx_794 0 1 2728 value=None prim::Constant pnnx_795 0 1 20211 value=1 aten::add pnnx_796 3 1 input.21 2727 20211 2729 #input.21=(1,2304,192)f32 #2727=(1,2304,192)f32 #2729=(1,2304,192)f32 prim::Constant pnnx_797 0 1 2730 value=trunc prim::Constant pnnx_798 0 1 2731 value=8 prim::Constant pnnx_799 0 1 2732 value=0 prim::Constant pnnx_800 0 1 2733 value=2 prim::Constant pnnx_801 0 1 2734 value=-4 prim::Constant pnnx_802 0 1 2735 value=1 prim::Constant pnnx_803 0 1 2736 value=3 prim::Constant pnnx_804 0 1 2737 value=8 prim::Constant pnnx_805 0 1 2738 value=4 prim::Constant pnnx_806 0 1 2739 value=5 prim::Constant pnnx_807 0 1 2740 value=-1 prim::Constant pnnx_808 0 1 2741 value=64 pnnx.Attribute layers_dfe.0.residual_group.blocks.3 0 1 attn_mask.5 @attn_mask=(36,64,64)f32 #attn_mask.5=(36,64,64)f32 aten::size pnnx_809 2 1 2729 2732 2748 #2729=(1,2304,192)f32 prim::NumToTensor pnnx_810 1 1 2748 B.11 aten::Int pnnx_811 1 1 B.11 2750 aten::Int pnnx_812 1 1 B.11 2751 aten::size pnnx_813 2 1 2729 2733 2752 #2729=(1,2304,192)f32 prim::NumToTensor pnnx_814 1 1 2752 C.23 aten::Int pnnx_815 1 1 C.23 2754 aten::Int pnnx_816 1 1 C.23 2755 aten::Int pnnx_817 1 1 C.23 2756 aten::Int pnnx_818 1 1 C.23 2757 nn.LayerNorm layers_dfe.0.residual_group.blocks.3.norm1 1 1 2729 2758 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #2729=(1,2304,192)f32 #2758=(1,2304,192)f32 prim::ListConstruct pnnx_819 4 1 2751 379 619 2757 2759 prim::Constant pnnx_821 0 1 20212 value=-4 prim::ListConstruct pnnx_822 2 1 2734 20212 2761 prim::Constant pnnx_823 0 1 20213 value=2 prim::ListConstruct pnnx_824 2 1 2735 20213 2762 Tensor.view Tensor.view_1006 2 1 2758 2759 x.9 $input=2758 $shape=2759 #2758=(1,2304,192)f32 #x.9=(1,48,48,192)f32 prim::Constant pnnx_826 0 1 20214 value=0 torch.roll torch.roll_2422 3 1 x.9 2761 2762 x0.9 $input=x.9 $shifts=2761 $dims=2762 #x.9=(1,48,48,192)f32 #x0.9=(1,48,48,192)f32 aten::size pnnx_827 2 1 x0.9 20214 2764 #x0.9=(1,48,48,192)f32 prim::NumToTensor pnnx_828 1 1 2764 B0.9 aten::Int pnnx_829 1 1 B0.9 2766 prim::Constant pnnx_830 0 1 20215 value=1 aten::size pnnx_831 2 1 x0.9 20215 2767 #x0.9=(1,48,48,192)f32 prim::NumToTensor pnnx_832 1 1 2767 2768 prim::Constant pnnx_833 0 1 20216 value=2 aten::size pnnx_834 2 1 x0.9 20216 2769 #x0.9=(1,48,48,192)f32 prim::NumToTensor pnnx_835 1 1 2769 2770 aten::size pnnx_836 2 1 x0.9 2736 2771 #x0.9=(1,48,48,192)f32 prim::NumToTensor pnnx_837 1 1 2771 C0.9 aten::Int pnnx_838 1 1 C0.9 2773 aten::Int pnnx_839 1 1 C0.9 2774 aten::div pnnx_840 3 1 2768 2731 2730 2775 aten::Int pnnx_841 1 1 2775 2776 prim::Constant pnnx_842 0 1 20217 value=8 prim::Constant pnnx_843 0 1 20218 value=trunc aten::div pnnx_844 3 1 2770 20217 20218 2777 aten::Int pnnx_845 1 1 2777 2778 prim::Constant pnnx_846 0 1 20219 value=8 prim::ListConstruct pnnx_847 6 1 2766 2776 2737 2778 20219 2774 2779 prim::Constant pnnx_849 0 1 20220 value=0 prim::Constant pnnx_850 0 1 20221 value=1 prim::Constant pnnx_851 0 1 20222 
value=3 prim::Constant pnnx_852 0 1 20223 value=2 prim::ListConstruct pnnx_853 6 1 20220 20221 20222 20223 2738 2739 2781 Tensor.view Tensor.view_1007 2 1 x0.9 2779 x1.9 $input=x0.9 $shape=2779 #x0.9=(1,48,48,192)f32 #x1.9=(1,6,8,6,8,192)f32 prim::Constant pnnx_857 0 1 20225 value=8 prim::Constant pnnx_858 0 1 20226 value=8 prim::ListConstruct pnnx_859 4 1 2740 20225 20226 2773 2784 torch.permute torch.permute_2540 2 1 x1.9 2781 2782 $input=x1.9 $dims=2781 #x1.9=(1,6,8,6,8,192)f32 #2782=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_11 1 1 2782 2783 memory_format=torch.contiguous_format $input=2782 #2782=(1,6,6,8,8,192)f32 #2783=(1,6,6,8,8,192)f32 prim::Constant pnnx_861 0 1 20227 value=-1 prim::ListConstruct pnnx_862 3 1 20227 2741 2756 2786 prim::Constant pnnx_864 0 1 2788 value=1.767767e-01 prim::Constant pnnx_865 0 1 2789 value=trunc prim::Constant pnnx_866 0 1 2790 value=6 prim::Constant pnnx_867 0 1 2791 value=0 prim::Constant pnnx_868 0 1 2792 value=1 prim::Constant pnnx_869 0 1 2793 value=2 prim::Constant pnnx_870 0 1 2794 value=3 prim::Constant pnnx_871 0 1 2795 value=6 prim::Constant pnnx_872 0 1 2796 value=4 prim::Constant pnnx_873 0 1 2797 value=-2 prim::Constant pnnx_874 0 1 2798 value=-1 prim::Constant pnnx_875 0 1 2799 value=64 pnnx.Attribute layers_dfe.0.residual_group.blocks.3.attn 0 1 relative_position_bias_table.9 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.9=(225,6)f32 pnnx.Attribute layers_dfe.0.residual_group.blocks.3.attn 0 1 relative_position_index.9 @relative_position_index=(64,64)i64 #relative_position_index.9=(64,64)i64 Tensor.view Tensor.view_1008 2 1 2783 2784 x_windows.9 $input=2783 $shape=2784 #2783=(1,6,6,8,8,192)f32 #x_windows.9=(36,8,8,192)f32 Tensor.view Tensor.view_1009 2 1 x_windows.9 2786 x2.9 $input=x_windows.9 $shape=2786 #x_windows.9=(36,8,8,192)f32 #x2.9=(36,64,192)f32 aten::size pnnx_876 2 1 x2.9 2791 2807 #x2.9=(36,64,192)f32 prim::NumToTensor pnnx_877 1 1 2807 B_.9 aten::Int pnnx_878 1 1 B_.9 2809 aten::Int pnnx_879 1 1 B_.9 2810 aten::size pnnx_880 2 1 x2.9 2792 2811 #x2.9=(36,64,192)f32 prim::NumToTensor pnnx_881 1 1 2811 N.9 aten::Int pnnx_882 1 1 N.9 2813 aten::Int pnnx_883 1 1 N.9 2814 aten::Int pnnx_884 1 1 N.9 2815 aten::Int pnnx_885 1 1 N.9 2816 aten::Int pnnx_886 1 1 N.9 2817 aten::Int pnnx_887 1 1 N.9 2818 aten::size pnnx_888 2 1 x2.9 2793 2819 #x2.9=(36,64,192)f32 prim::NumToTensor pnnx_889 1 1 2819 C.25 aten::Int pnnx_890 1 1 C.25 2821 nn.Linear layers_dfe.0.residual_group.blocks.3.attn.qkv 1 1 x2.9 2822 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.9=(36,64,192)f32 #2822=(36,64,576)f32 aten::div pnnx_891 3 1 C.25 2790 2789 2823 aten::Int pnnx_892 1 1 2823 2824 prim::ListConstruct pnnx_893 5 1 2810 2818 2794 2795 2824 2825 prim::Constant pnnx_895 0 1 20228 value=2 prim::Constant pnnx_896 0 1 20229 value=0 prim::Constant pnnx_897 0 1 20230 value=3 prim::Constant pnnx_898 0 1 20231 value=1 prim::ListConstruct pnnx_899 5 1 20228 20229 20230 20231 2796 2827 Tensor.reshape Tensor.reshape_440 2 1 2822 2825 2826 $input=2822 $shape=2825 #2822=(36,64,576)f32 #2826=(36,64,3,6,32)f32 prim::Constant pnnx_901 0 1 20232 value=0 prim::Constant pnnx_902 0 1 20233 value=0 prim::Constant pnnx_904 0 1 20234 value=0 prim::Constant pnnx_905 0 1 20235 value=1 prim::Constant pnnx_907 0 1 20236 value=0 prim::Constant pnnx_908 0 1 20237 value=2 torch.permute torch.permute_2541 2 1 2826 2827 qkv0.9 $input=2826 $dims=2827 #2826=(36,64,3,6,32)f32 #qkv0.9=(3,36,6,64,32)f32 Tensor.select 
Tensor.select_659 3 1 qkv0.9 20232 20233 q.9 $input=qkv0.9 $dim=20232 $index=20233 #qkv0.9=(3,36,6,64,32)f32 #q.9=(36,6,64,32)f32 aten::mul pnnx_910 2 1 q.9 2788 q0.9 #q.9=(36,6,64,32)f32 #q0.9=(36,6,64,32)f32 Tensor.select Tensor.select_660 3 1 qkv0.9 20234 20235 k.9 $input=qkv0.9 $dim=20234 $index=20235 #qkv0.9=(3,36,6,64,32)f32 #k.9=(36,6,64,32)f32 prim::Constant pnnx_913 0 1 20238 value=-1 prim::ListConstruct pnnx_914 1 1 20238 2835 Tensor.view Tensor.view_1010 2 1 relative_position_index.9 2835 2836 $input=relative_position_index.9 $shape=2835 #relative_position_index.9=(64,64)i64 #2836=(4096)i64 prim::ListConstruct pnnx_916 1 1 2836 2837 #2836=(4096)i64 prim::Constant pnnx_918 0 1 20239 value=64 prim::Constant pnnx_919 0 1 20240 value=-1 prim::ListConstruct pnnx_920 3 1 2799 20239 20240 2839 Tensor.index Tensor.index_329 2 1 relative_position_bias_table.9 2837 2838 $input=relative_position_bias_table.9 $expr=2837 #relative_position_bias_table.9=(225,6)f32 #2838=(4096,6)f32 prim::Constant pnnx_922 0 1 20241 value=2 prim::Constant pnnx_923 0 1 20242 value=0 prim::Constant pnnx_924 0 1 20243 value=1 prim::ListConstruct pnnx_925 3 1 20241 20242 20243 2841 Tensor.view Tensor.view_1011 2 1 2838 2839 relative_position_bias.9 $input=2838 $shape=2839 #2838=(4096,6)f32 #relative_position_bias.9=(64,64,6)f32 prim::Constant pnnx_929 0 1 20245 value=0 torch.permute torch.permute_2542 2 1 relative_position_bias.9 2841 2842 $input=relative_position_bias.9 $dims=2841 #relative_position_bias.9=(64,64,6)f32 #2842=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_12 1 1 2842 relative_position_bias0.9 memory_format=torch.contiguous_format $input=2842 #2842=(6,64,64)f32 #relative_position_bias0.9=(6,64,64)f32 prim::Constant pnnx_931 0 1 20246 value=1 torch.transpose torch.transpose_2967 3 1 k.9 2797 2798 2833 $input=k.9 $dim0=2797 $dim1=2798 #k.9=(36,6,64,32)f32 #2833=(36,6,32,64)f32 torch.matmul torch.matmul_2210 2 1 q0.9 2833 attn.19 $input=q0.9 $other=2833 #q0.9=(36,6,64,32)f32 #2833=(36,6,32,64)f32 #attn.19=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3223 2 1 relative_position_bias0.9 20245 2844 $input=relative_position_bias0.9 $dim=20245 #relative_position_bias0.9=(6,64,64)f32 #2844=(1,6,64,64)f32 aten::add pnnx_932 3 1 attn.19 2844 20246 attn0.5 #attn.19=(36,6,64,64)f32 #2844=(1,6,64,64)f32 #attn0.5=(36,6,64,64)f32 prim::Constant pnnx_933 0 1 20247 value=0 aten::size pnnx_934 2 1 attn_mask.5 20247 2846 #attn_mask.5=(36,64,64)f32 prim::NumToTensor pnnx_935 1 1 2846 other.5 aten::Int pnnx_936 1 1 other.5 2848 prim::Constant pnnx_937 0 1 20248 value=trunc aten::div pnnx_938 3 1 B_.9 other.5 20248 2849 aten::Int pnnx_939 1 1 2849 2850 prim::Constant pnnx_940 0 1 20249 value=6 prim::ListConstruct pnnx_941 5 1 2850 2848 20249 2817 2816 2851 prim::Constant pnnx_943 0 1 20250 value=1 prim::Constant pnnx_945 0 1 20251 value=0 prim::Constant pnnx_947 0 1 20252 value=1 Tensor.view Tensor.view_1012 2 1 attn0.5 2851 2852 $input=attn0.5 $shape=2851 #attn0.5=(36,6,64,64)f32 #2852=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3224 2 1 attn_mask.5 20250 2853 $input=attn_mask.5 $dim=20250 #attn_mask.5=(36,64,64)f32 #2853=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3225 2 1 2853 20251 2854 $input=2853 $dim=20251 #2853=(36,1,64,64)f32 #2854=(1,36,1,64,64)f32 aten::add pnnx_948 3 1 2852 2854 20252 attn1.5 #2852=(1,36,6,64,64)f32 #2854=(1,36,1,64,64)f32 #attn1.5=(1,36,6,64,64)f32 prim::Constant pnnx_949 0 1 20253 value=-1 prim::Constant pnnx_950 0 1 20254 value=6 prim::ListConstruct pnnx_951 4 1 20253 20254 
2815 2814 2856 Tensor.view Tensor.view_1013 2 1 attn1.5 2856 input.23 $input=attn1.5 $shape=2856 #attn1.5=(1,36,6,64,64)f32 #input.23=(36,6,64,64)f32 nn.Softmax layers_dfe.0.residual_group.blocks.3.attn.softmax 1 1 input.23 2858 dim=-1 #input.23=(36,6,64,64)f32 #2858=(36,6,64,64)f32 nn.Dropout layers_dfe.0.residual_group.blocks.3.attn.attn_drop 1 1 2858 2859 #2858=(36,6,64,64)f32 #2859=(36,6,64,64)f32 Tensor.select Tensor.select_661 3 1 qkv0.9 20236 20237 v.9 $input=qkv0.9 $dim=20236 $index=20237 #qkv0.9=(3,36,6,64,32)f32 #v.9=(36,6,64,32)f32 prim::Constant pnnx_954 0 1 20255 value=1 prim::Constant pnnx_955 0 1 20256 value=2 torch.matmul torch.matmul_2211 2 1 2859 v.9 2860 $input=2859 $other=v.9 #2859=(36,6,64,64)f32 #v.9=(36,6,64,32)f32 #2860=(36,6,64,32)f32 prim::ListConstruct pnnx_957 3 1 2809 2813 2821 2862 torch.transpose torch.transpose_2968 3 1 2860 20255 20256 2861 $input=2860 $dim0=20255 $dim1=20256 #2860=(36,6,64,32)f32 #2861=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_441 2 1 2861 2862 input0.11 $input=2861 $shape=2862 #2861=(36,64,6,32)f32 #input0.11=(36,64,192)f32 nn.Linear layers_dfe.0.residual_group.blocks.3.attn.proj 1 1 input0.11 2864 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.11=(36,64,192)f32 #2864=(36,64,192)f32 nn.Dropout layers_dfe.0.residual_group.blocks.3.attn.proj_drop 1 1 2864 2865 #2864=(36,64,192)f32 #2865=(36,64,192)f32 prim::Constant pnnx_959 0 1 20257 value=-1 prim::Constant pnnx_960 0 1 20258 value=8 prim::Constant pnnx_961 0 1 20259 value=8 prim::ListConstruct pnnx_962 4 1 20257 20258 20259 2755 2866 prim::Constant pnnx_964 0 1 20260 value=8 prim::Constant pnnx_965 0 1 20261 value=trunc aten::div pnnx_966 3 1 H.1 20260 20261 2868 aten::Int pnnx_967 1 1 2868 2869 prim::Constant pnnx_968 0 1 20262 value=8 prim::Constant pnnx_969 0 1 20263 value=trunc aten::div pnnx_970 3 1 W.1 20262 20263 2870 aten::Int pnnx_971 1 1 2870 2871 prim::Constant pnnx_972 0 1 20264 value=1 prim::Constant pnnx_973 0 1 20265 value=8 prim::Constant pnnx_974 0 1 20266 value=8 prim::Constant pnnx_975 0 1 20267 value=-1 prim::ListConstruct pnnx_976 6 1 20264 2869 2871 20265 20266 20267 2872 prim::Constant pnnx_978 0 1 20268 value=0 prim::Constant pnnx_979 0 1 20269 value=1 prim::Constant pnnx_980 0 1 20270 value=3 prim::Constant pnnx_981 0 1 20271 value=2 prim::Constant pnnx_982 0 1 20272 value=4 prim::Constant pnnx_983 0 1 20273 value=5 prim::ListConstruct pnnx_984 6 1 20268 20269 20270 20271 20272 20273 2874 Tensor.view Tensor.view_1014 2 1 2865 2866 windows.9 $input=2865 $shape=2866 #2865=(36,64,192)f32 #windows.9=(36,8,8,192)f32 Tensor.view Tensor.view_1015 2 1 windows.9 2872 x3.9 $input=windows.9 $shape=2872 #windows.9=(36,8,8,192)f32 #x3.9=(1,6,6,8,8,192)f32 prim::Constant pnnx_988 0 1 20275 value=1 prim::Constant pnnx_989 0 1 20276 value=-1 prim::ListConstruct pnnx_990 4 1 20275 376 616 20276 2877 torch.permute torch.permute_2543 2 1 x3.9 2874 2875 $input=x3.9 $dims=2874 #x3.9=(1,6,6,8,8,192)f32 #2875=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_13 1 1 2875 2876 memory_format=torch.contiguous_format $input=2875 #2875=(1,6,8,6,8,192)f32 #2876=(1,6,8,6,8,192)f32 prim::Constant pnnx_992 0 1 20277 value=4 prim::Constant pnnx_993 0 1 20278 value=4 prim::ListConstruct pnnx_994 2 1 20277 20278 2879 prim::Constant pnnx_995 0 1 20279 value=1 prim::Constant pnnx_996 0 1 20280 value=2 prim::ListConstruct pnnx_997 2 1 20279 20280 2880 Tensor.view Tensor.view_1016 2 1 2876 2877 shifted_x.5 $input=2876 $shape=2877 #2876=(1,6,8,6,8,192)f32 
#shifted_x.5=(1,48,48,192)f32 aten::mul pnnx_999 2 1 H.1 W.1 2882 aten::Int pnnx_1000 1 1 2882 2883 prim::ListConstruct pnnx_1001 3 1 2750 2883 2754 2884 prim::Constant pnnx_1003 0 1 2886 value=None prim::Constant pnnx_1004 0 1 20281 value=1 torch.roll torch.roll_2423 3 1 shifted_x.5 2879 2880 x4.9 $input=shifted_x.5 $shifts=2879 $dims=2880 #shifted_x.5=(1,48,48,192)f32 #x4.9=(1,48,48,192)f32 Tensor.view Tensor.view_1017 2 1 x4.9 2884 x5.5 $input=x4.9 $shape=2884 #x4.9=(1,48,48,192)f32 #x5.5=(1,2304,192)f32 aten::add pnnx_1005 3 1 2729 x5.5 20281 input.25 #2729=(1,2304,192)f32 #x5.5=(1,2304,192)f32 #input.25=(1,2304,192)f32 nn.LayerNorm layers_dfe.0.residual_group.blocks.3.norm2 1 1 input.25 2888 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.25=(1,2304,192)f32 #2888=(1,2304,192)f32 nn.Linear layers_dfe.0.residual_group.blocks.3.mlp.fc1 1 1 2888 2893 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #2888=(1,2304,192)f32 #2893=(1,2304,384)f32 nn.GELU layers_dfe.0.residual_group.blocks.3.mlp.act 1 1 2893 2894 #2893=(1,2304,384)f32 #2894=(1,2304,384)f32 nn.Dropout layers_dfe.0.residual_group.blocks.3.mlp.drop 1 1 2894 2895 #2894=(1,2304,384)f32 #2895=(1,2304,384)f32 nn.Linear layers_dfe.0.residual_group.blocks.3.mlp.fc2 1 1 2895 2896 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #2895=(1,2304,384)f32 #2896=(1,2304,192)f32 nn.Dropout layers_dfe.0.residual_group.blocks.3.mlp.drop 1 1 2896 2897 #2896=(1,2304,192)f32 #2897=(1,2304,192)f32 prim::Constant pnnx_1006 0 1 2898 value=None prim::Constant pnnx_1007 0 1 20282 value=1 aten::add pnnx_1008 3 1 input.25 2897 20282 2899 #input.25=(1,2304,192)f32 #2897=(1,2304,192)f32 #2899=(1,2304,192)f32 prim::Constant pnnx_1009 0 1 2900 value=trunc prim::Constant pnnx_1010 0 1 2901 value=8 prim::Constant pnnx_1011 0 1 2902 value=0 prim::Constant pnnx_1012 0 1 2903 value=2 prim::Constant pnnx_1013 0 1 2904 value=1 prim::Constant pnnx_1014 0 1 2905 value=3 prim::Constant pnnx_1015 0 1 2906 value=8 prim::Constant pnnx_1016 0 1 2907 value=4 prim::Constant pnnx_1017 0 1 2908 value=5 prim::Constant pnnx_1018 0 1 2909 value=-1 prim::Constant pnnx_1019 0 1 2910 value=64 aten::size pnnx_1020 2 1 2899 2902 2916 #2899=(1,2304,192)f32 prim::NumToTensor pnnx_1021 1 1 2916 B.13 aten::Int pnnx_1022 1 1 B.13 2918 aten::Int pnnx_1023 1 1 B.13 2919 aten::size pnnx_1024 2 1 2899 2903 2920 #2899=(1,2304,192)f32 prim::NumToTensor pnnx_1025 1 1 2920 C.27 aten::Int pnnx_1026 1 1 C.27 2922 aten::Int pnnx_1027 1 1 C.27 2923 aten::Int pnnx_1028 1 1 C.27 2924 aten::Int pnnx_1029 1 1 C.27 2925 nn.LayerNorm layers_dfe.0.residual_group.blocks.4.norm1 1 1 2899 2926 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #2899=(1,2304,192)f32 #2926=(1,2304,192)f32 prim::ListConstruct pnnx_1030 4 1 2919 373 613 2925 2927 prim::Constant pnnx_1032 0 1 20283 value=0 Tensor.view Tensor.view_1018 2 1 2926 2927 x.11 $input=2926 $shape=2927 #2926=(1,2304,192)f32 #x.11=(1,48,48,192)f32 aten::size pnnx_1033 2 1 x.11 20283 2929 #x.11=(1,48,48,192)f32 prim::NumToTensor pnnx_1034 1 1 2929 B0.11 aten::Int pnnx_1035 1 1 B0.11 2931 aten::size pnnx_1036 2 1 x.11 2904 2932 #x.11=(1,48,48,192)f32 prim::NumToTensor pnnx_1037 1 1 2932 2933 prim::Constant pnnx_1038 0 1 20284 value=2 aten::size pnnx_1039 2 1 x.11 20284 2934 #x.11=(1,48,48,192)f32 prim::NumToTensor pnnx_1040 1 1 2934 2935 aten::size pnnx_1041 2 1 x.11 2905 2936 #x.11=(1,48,48,192)f32 
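The repeated view → permute → contiguous → view chains in the blocks above, e.g. (1,48,48,192) → (1,6,8,6,8,192) → (1,6,6,8,8,192) → (36,8,8,192) and the mirror-image chain after attn.proj, match the usual Swin-style 8×8 window partition and window reverse. A minimal PyTorch sketch of that pattern, assuming the (B, H, W, C) layout seen in the shape comments (function names are illustrative, not taken from the graph):

```python
import torch

def window_partition(x: torch.Tensor, window_size: int = 8) -> torch.Tensor:
    # (B, H, W, C) -> (num_windows*B, window_size, window_size, C)
    B, H, W, C = x.shape
    x = x.view(B, H // window_size, window_size, W // window_size, window_size, C)
    windows = x.permute(0, 1, 3, 2, 4, 5).contiguous()
    return windows.view(-1, window_size, window_size, C)

def window_reverse(windows: torch.Tensor, window_size: int, H: int, W: int) -> torch.Tensor:
    # (num_windows*B, window_size, window_size, C) -> (B, H, W, C)
    B = windows.shape[0] // ((H // window_size) * (W // window_size))
    x = windows.view(B, H // window_size, W // window_size, window_size, window_size, -1)
    return x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, H, W, -1)

# e.g. (1, 48, 48, 192) -> (36, 8, 8, 192) and back, matching the shapes in the graph
x = torch.randn(1, 48, 48, 192)
w = window_partition(x)           # (36, 8, 8, 192)
y = window_reverse(w, 8, 48, 48)  # (1, 48, 48, 192)
assert torch.equal(x, y)          # partition/reverse is an exact rearrangement
```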
prim::NumToTensor pnnx_1042 1 1 2936 C0.11 aten::Int pnnx_1043 1 1 C0.11 2938 aten::Int pnnx_1044 1 1 C0.11 2939 aten::div pnnx_1045 3 1 2933 2901 2900 2940 aten::Int pnnx_1046 1 1 2940 2941 prim::Constant pnnx_1047 0 1 20285 value=8 prim::Constant pnnx_1048 0 1 20286 value=trunc aten::div pnnx_1049 3 1 2935 20285 20286 2942 aten::Int pnnx_1050 1 1 2942 2943 prim::Constant pnnx_1051 0 1 20287 value=8 prim::ListConstruct pnnx_1052 6 1 2931 2941 2906 2943 20287 2939 2944 prim::Constant pnnx_1054 0 1 20288 value=0 prim::Constant pnnx_1055 0 1 20289 value=1 prim::Constant pnnx_1056 0 1 20290 value=3 prim::Constant pnnx_1057 0 1 20291 value=2 prim::ListConstruct pnnx_1058 6 1 20288 20289 20290 20291 2907 2908 2946 Tensor.view Tensor.view_1019 2 1 x.11 2944 x0.11 $input=x.11 $shape=2944 #x.11=(1,48,48,192)f32 #x0.11=(1,6,8,6,8,192)f32 prim::Constant pnnx_1062 0 1 20293 value=8 prim::Constant pnnx_1063 0 1 20294 value=8 prim::ListConstruct pnnx_1064 4 1 2909 20293 20294 2938 2949 torch.permute torch.permute_2544 2 1 x0.11 2946 2947 $input=x0.11 $dims=2946 #x0.11=(1,6,8,6,8,192)f32 #2947=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_14 1 1 2947 2948 memory_format=torch.contiguous_format $input=2947 #2947=(1,6,6,8,8,192)f32 #2948=(1,6,6,8,8,192)f32 prim::Constant pnnx_1066 0 1 20295 value=-1 prim::ListConstruct pnnx_1067 3 1 20295 2910 2924 2951 prim::Constant pnnx_1069 0 1 2953 value=1.767767e-01 prim::Constant pnnx_1070 0 1 2954 value=trunc prim::Constant pnnx_1071 0 1 2955 value=6 prim::Constant pnnx_1072 0 1 2956 value=0 prim::Constant pnnx_1073 0 1 2957 value=1 prim::Constant pnnx_1074 0 1 2958 value=2 prim::Constant pnnx_1075 0 1 2959 value=3 prim::Constant pnnx_1076 0 1 2960 value=6 prim::Constant pnnx_1077 0 1 2961 value=4 prim::Constant pnnx_1078 0 1 2962 value=-2 prim::Constant pnnx_1079 0 1 2963 value=-1 prim::Constant pnnx_1080 0 1 2964 value=64 pnnx.Attribute layers_dfe.0.residual_group.blocks.4.attn 0 1 relative_position_bias_table.11 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.11=(225,6)f32 pnnx.Attribute layers_dfe.0.residual_group.blocks.4.attn 0 1 relative_position_index.11 @relative_position_index=(64,64)i64 #relative_position_index.11=(64,64)i64 Tensor.view Tensor.view_1020 2 1 2948 2949 x_windows.11 $input=2948 $shape=2949 #2948=(1,6,6,8,8,192)f32 #x_windows.11=(36,8,8,192)f32 Tensor.view Tensor.view_1021 2 1 x_windows.11 2951 x1.11 $input=x_windows.11 $shape=2951 #x_windows.11=(36,8,8,192)f32 #x1.11=(36,64,192)f32 aten::size pnnx_1081 2 1 x1.11 2956 2972 #x1.11=(36,64,192)f32 prim::NumToTensor pnnx_1082 1 1 2972 B_.11 aten::Int pnnx_1083 1 1 B_.11 2974 aten::Int pnnx_1084 1 1 B_.11 2975 aten::size pnnx_1085 2 1 x1.11 2957 2976 #x1.11=(36,64,192)f32 prim::NumToTensor pnnx_1086 1 1 2976 N.11 aten::Int pnnx_1087 1 1 N.11 2978 aten::Int pnnx_1088 1 1 N.11 2979 aten::size pnnx_1089 2 1 x1.11 2958 2980 #x1.11=(36,64,192)f32 prim::NumToTensor pnnx_1090 1 1 2980 C.29 aten::Int pnnx_1091 1 1 C.29 2982 nn.Linear layers_dfe.0.residual_group.blocks.4.attn.qkv 1 1 x1.11 2983 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.11=(36,64,192)f32 #2983=(36,64,576)f32 aten::div pnnx_1092 3 1 C.29 2955 2954 2984 aten::Int pnnx_1093 1 1 2984 2985 prim::ListConstruct pnnx_1094 5 1 2975 2979 2959 2960 2985 2986 prim::Constant pnnx_1096 0 1 20296 value=2 prim::Constant pnnx_1097 0 1 20297 value=0 prim::Constant pnnx_1098 0 1 20298 value=3 prim::Constant pnnx_1099 0 1 20299 value=1 prim::ListConstruct pnnx_1100 5 1 20296 20297 20298 
20299 2961 2988 Tensor.reshape Tensor.reshape_442 2 1 2983 2986 2987 $input=2983 $shape=2986 #2983=(36,64,576)f32 #2987=(36,64,3,6,32)f32 prim::Constant pnnx_1102 0 1 20300 value=0 prim::Constant pnnx_1103 0 1 20301 value=0 prim::Constant pnnx_1105 0 1 20302 value=0 prim::Constant pnnx_1106 0 1 20303 value=1 prim::Constant pnnx_1108 0 1 20304 value=0 prim::Constant pnnx_1109 0 1 20305 value=2 torch.permute torch.permute_2545 2 1 2987 2988 qkv0.11 $input=2987 $dims=2988 #2987=(36,64,3,6,32)f32 #qkv0.11=(3,36,6,64,32)f32 Tensor.select Tensor.select_662 3 1 qkv0.11 20300 20301 q.11 $input=qkv0.11 $dim=20300 $index=20301 #qkv0.11=(3,36,6,64,32)f32 #q.11=(36,6,64,32)f32 aten::mul pnnx_1111 2 1 q.11 2953 q0.11 #q.11=(36,6,64,32)f32 #q0.11=(36,6,64,32)f32 Tensor.select Tensor.select_663 3 1 qkv0.11 20302 20303 k.11 $input=qkv0.11 $dim=20302 $index=20303 #qkv0.11=(3,36,6,64,32)f32 #k.11=(36,6,64,32)f32 prim::Constant pnnx_1114 0 1 20306 value=-1 prim::ListConstruct pnnx_1115 1 1 20306 2996 Tensor.view Tensor.view_1022 2 1 relative_position_index.11 2996 2997 $input=relative_position_index.11 $shape=2996 #relative_position_index.11=(64,64)i64 #2997=(4096)i64 prim::ListConstruct pnnx_1117 1 1 2997 2998 #2997=(4096)i64 prim::Constant pnnx_1119 0 1 20307 value=64 prim::Constant pnnx_1120 0 1 20308 value=-1 prim::ListConstruct pnnx_1121 3 1 2964 20307 20308 3000 Tensor.index Tensor.index_330 2 1 relative_position_bias_table.11 2998 2999 $input=relative_position_bias_table.11 $expr=2998 #relative_position_bias_table.11=(225,6)f32 #2999=(4096,6)f32 prim::Constant pnnx_1123 0 1 20309 value=2 prim::Constant pnnx_1124 0 1 20310 value=0 prim::Constant pnnx_1125 0 1 20311 value=1 prim::ListConstruct pnnx_1126 3 1 20309 20310 20311 3002 Tensor.view Tensor.view_1023 2 1 2999 3000 relative_position_bias.11 $input=2999 $shape=3000 #2999=(4096,6)f32 #relative_position_bias.11=(64,64,6)f32 prim::Constant pnnx_1130 0 1 20313 value=0 torch.permute torch.permute_2546 2 1 relative_position_bias.11 3002 3003 $input=relative_position_bias.11 $dims=3002 #relative_position_bias.11=(64,64,6)f32 #3003=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_15 1 1 3003 relative_position_bias0.11 memory_format=torch.contiguous_format $input=3003 #3003=(6,64,64)f32 #relative_position_bias0.11=(6,64,64)f32 prim::Constant pnnx_1132 0 1 20314 value=1 torch.transpose torch.transpose_2969 3 1 k.11 2962 2963 2994 $input=k.11 $dim0=2962 $dim1=2963 #k.11=(36,6,64,32)f32 #2994=(36,6,32,64)f32 torch.matmul torch.matmul_2212 2 1 q0.11 2994 attn.23 $input=q0.11 $other=2994 #q0.11=(36,6,64,32)f32 #2994=(36,6,32,64)f32 #attn.23=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3226 2 1 relative_position_bias0.11 20313 3005 $input=relative_position_bias0.11 $dim=20313 #relative_position_bias0.11=(6,64,64)f32 #3005=(1,6,64,64)f32 aten::add pnnx_1133 3 1 attn.23 3005 20314 input.27 #attn.23=(36,6,64,64)f32 #3005=(1,6,64,64)f32 #input.27=(36,6,64,64)f32 nn.Softmax layers_dfe.0.residual_group.blocks.4.attn.softmax 1 1 input.27 3007 dim=-1 #input.27=(36,6,64,64)f32 #3007=(36,6,64,64)f32 nn.Dropout layers_dfe.0.residual_group.blocks.4.attn.attn_drop 1 1 3007 3008 #3007=(36,6,64,64)f32 #3008=(36,6,64,64)f32 Tensor.select Tensor.select_664 3 1 qkv0.11 20304 20305 v.11 $input=qkv0.11 $dim=20304 $index=20305 #qkv0.11=(3,36,6,64,32)f32 #v.11=(36,6,64,32)f32 prim::Constant pnnx_1135 0 1 20315 value=1 prim::Constant pnnx_1136 0 1 20316 value=2 torch.matmul torch.matmul_2213 2 1 3008 v.11 3009 $input=3008 $other=v.11 #3008=(36,6,64,64)f32 #v.11=(36,6,64,32)f32 
#3009=(36,6,64,32)f32 prim::ListConstruct pnnx_1138 3 1 2974 2978 2982 3011 torch.transpose torch.transpose_2970 3 1 3009 20315 20316 3010 $input=3009 $dim0=20315 $dim1=20316 #3009=(36,6,64,32)f32 #3010=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_443 2 1 3010 3011 input0.13 $input=3010 $shape=3011 #3010=(36,64,6,32)f32 #input0.13=(36,64,192)f32 nn.Linear layers_dfe.0.residual_group.blocks.4.attn.proj 1 1 input0.13 3013 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.13=(36,64,192)f32 #3013=(36,64,192)f32 nn.Dropout layers_dfe.0.residual_group.blocks.4.attn.proj_drop 1 1 3013 3014 #3013=(36,64,192)f32 #3014=(36,64,192)f32 prim::Constant pnnx_1140 0 1 20317 value=-1 prim::Constant pnnx_1141 0 1 20318 value=8 prim::Constant pnnx_1142 0 1 20319 value=8 prim::ListConstruct pnnx_1143 4 1 20317 20318 20319 2923 3015 prim::Constant pnnx_1145 0 1 20320 value=8 prim::Constant pnnx_1146 0 1 20321 value=trunc aten::div pnnx_1147 3 1 H.1 20320 20321 3017 aten::Int pnnx_1148 1 1 3017 3018 prim::Constant pnnx_1149 0 1 20322 value=8 prim::Constant pnnx_1150 0 1 20323 value=trunc aten::div pnnx_1151 3 1 W.1 20322 20323 3019 aten::Int pnnx_1152 1 1 3019 3020 prim::Constant pnnx_1153 0 1 20324 value=1 prim::Constant pnnx_1154 0 1 20325 value=8 prim::Constant pnnx_1155 0 1 20326 value=8 prim::Constant pnnx_1156 0 1 20327 value=-1 prim::ListConstruct pnnx_1157 6 1 20324 3018 3020 20325 20326 20327 3021 prim::Constant pnnx_1159 0 1 20328 value=0 prim::Constant pnnx_1160 0 1 20329 value=1 prim::Constant pnnx_1161 0 1 20330 value=3 prim::Constant pnnx_1162 0 1 20331 value=2 prim::Constant pnnx_1163 0 1 20332 value=4 prim::Constant pnnx_1164 0 1 20333 value=5 prim::ListConstruct pnnx_1165 6 1 20328 20329 20330 20331 20332 20333 3023 Tensor.view Tensor.view_1024 2 1 3014 3015 windows.11 $input=3014 $shape=3015 #3014=(36,64,192)f32 #windows.11=(36,8,8,192)f32 Tensor.view Tensor.view_1025 2 1 windows.11 3021 x2.11 $input=windows.11 $shape=3021 #windows.11=(36,8,8,192)f32 #x2.11=(1,6,6,8,8,192)f32 prim::Constant pnnx_1169 0 1 20335 value=1 prim::Constant pnnx_1170 0 1 20336 value=-1 prim::ListConstruct pnnx_1171 4 1 20335 370 610 20336 3026 torch.permute torch.permute_2547 2 1 x2.11 3023 3024 $input=x2.11 $dims=3023 #x2.11=(1,6,6,8,8,192)f32 #3024=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_16 1 1 3024 3025 memory_format=torch.contiguous_format $input=3024 #3024=(1,6,8,6,8,192)f32 #3025=(1,6,8,6,8,192)f32 aten::mul pnnx_1173 2 1 H.1 W.1 3028 aten::Int pnnx_1174 1 1 3028 3029 prim::ListConstruct pnnx_1175 3 1 2918 3029 2922 3030 prim::Constant pnnx_1177 0 1 3032 value=None prim::Constant pnnx_1178 0 1 20337 value=1 Tensor.view Tensor.view_1026 2 1 3025 3026 x3.11 $input=3025 $shape=3026 #3025=(1,6,8,6,8,192)f32 #x3.11=(1,48,48,192)f32 Tensor.view Tensor.view_1027 2 1 x3.11 3030 x4.11 $input=x3.11 $shape=3030 #x3.11=(1,48,48,192)f32 #x4.11=(1,2304,192)f32 aten::add pnnx_1179 3 1 2899 x4.11 20337 input.29 #2899=(1,2304,192)f32 #x4.11=(1,2304,192)f32 #input.29=(1,2304,192)f32 nn.LayerNorm layers_dfe.0.residual_group.blocks.4.norm2 1 1 input.29 3034 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.29=(1,2304,192)f32 #3034=(1,2304,192)f32 nn.Linear layers_dfe.0.residual_group.blocks.4.mlp.fc1 1 1 3034 3039 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #3034=(1,2304,192)f32 #3039=(1,2304,384)f32 nn.GELU layers_dfe.0.residual_group.blocks.4.mlp.act 1 1 3039 3040 #3039=(1,2304,384)f32 
#3040=(1,2304,384)f32 nn.Dropout layers_dfe.0.residual_group.blocks.4.mlp.drop 1 1 3040 3041 #3040=(1,2304,384)f32 #3041=(1,2304,384)f32 nn.Linear layers_dfe.0.residual_group.blocks.4.mlp.fc2 1 1 3041 3042 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #3041=(1,2304,384)f32 #3042=(1,2304,192)f32 nn.Dropout layers_dfe.0.residual_group.blocks.4.mlp.drop 1 1 3042 3043 #3042=(1,2304,192)f32 #3043=(1,2304,192)f32 prim::Constant pnnx_1180 0 1 3044 value=None prim::Constant pnnx_1181 0 1 20338 value=1 aten::add pnnx_1182 3 1 input.29 3043 20338 3045 #input.29=(1,2304,192)f32 #3043=(1,2304,192)f32 #3045=(1,2304,192)f32 prim::Constant pnnx_1183 0 1 3046 value=trunc prim::Constant pnnx_1184 0 1 3047 value=8 prim::Constant pnnx_1185 0 1 3048 value=0 prim::Constant pnnx_1186 0 1 3049 value=2 prim::Constant pnnx_1187 0 1 3050 value=-4 prim::Constant pnnx_1188 0 1 3051 value=1 prim::Constant pnnx_1189 0 1 3052 value=3 prim::Constant pnnx_1190 0 1 3053 value=8 prim::Constant pnnx_1191 0 1 3054 value=4 prim::Constant pnnx_1192 0 1 3055 value=5 prim::Constant pnnx_1193 0 1 3056 value=-1 prim::Constant pnnx_1194 0 1 3057 value=64 pnnx.Attribute layers_dfe.0.residual_group.blocks.5 0 1 attn_mask.7 @attn_mask=(36,64,64)f32 #attn_mask.7=(36,64,64)f32 aten::size pnnx_1195 2 1 3045 3048 3064 #3045=(1,2304,192)f32 prim::NumToTensor pnnx_1196 1 1 3064 B.15 aten::Int pnnx_1197 1 1 B.15 3066 aten::Int pnnx_1198 1 1 B.15 3067 aten::size pnnx_1199 2 1 3045 3049 3068 #3045=(1,2304,192)f32 prim::NumToTensor pnnx_1200 1 1 3068 C.31 aten::Int pnnx_1201 1 1 C.31 3070 aten::Int pnnx_1202 1 1 C.31 3071 aten::Int pnnx_1203 1 1 C.31 3072 aten::Int pnnx_1204 1 1 C.31 3073 nn.LayerNorm layers_dfe.0.residual_group.blocks.5.norm1 1 1 3045 3074 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #3045=(1,2304,192)f32 #3074=(1,2304,192)f32 prim::ListConstruct pnnx_1205 4 1 3067 367 607 3073 3075 prim::Constant pnnx_1207 0 1 20339 value=-4 prim::ListConstruct pnnx_1208 2 1 3050 20339 3077 prim::Constant pnnx_1209 0 1 20340 value=2 prim::ListConstruct pnnx_1210 2 1 3051 20340 3078 Tensor.view Tensor.view_1028 2 1 3074 3075 x.13 $input=3074 $shape=3075 #3074=(1,2304,192)f32 #x.13=(1,48,48,192)f32 prim::Constant pnnx_1212 0 1 20341 value=0 torch.roll torch.roll_2424 3 1 x.13 3077 3078 x0.13 $input=x.13 $shifts=3077 $dims=3078 #x.13=(1,48,48,192)f32 #x0.13=(1,48,48,192)f32 aten::size pnnx_1213 2 1 x0.13 20341 3080 #x0.13=(1,48,48,192)f32 prim::NumToTensor pnnx_1214 1 1 3080 B0.13 aten::Int pnnx_1215 1 1 B0.13 3082 prim::Constant pnnx_1216 0 1 20342 value=1 aten::size pnnx_1217 2 1 x0.13 20342 3083 #x0.13=(1,48,48,192)f32 prim::NumToTensor pnnx_1218 1 1 3083 3084 prim::Constant pnnx_1219 0 1 20343 value=2 aten::size pnnx_1220 2 1 x0.13 20343 3085 #x0.13=(1,48,48,192)f32 prim::NumToTensor pnnx_1221 1 1 3085 3086 aten::size pnnx_1222 2 1 x0.13 3052 3087 #x0.13=(1,48,48,192)f32 prim::NumToTensor pnnx_1223 1 1 3087 C0.13 aten::Int pnnx_1224 1 1 C0.13 3089 aten::Int pnnx_1225 1 1 C0.13 3090 aten::div pnnx_1226 3 1 3084 3047 3046 3091 aten::Int pnnx_1227 1 1 3091 3092 prim::Constant pnnx_1228 0 1 20344 value=8 prim::Constant pnnx_1229 0 1 20345 value=trunc aten::div pnnx_1230 3 1 3086 20344 20345 3093 aten::Int pnnx_1231 1 1 3093 3094 prim::Constant pnnx_1232 0 1 20346 value=8 prim::ListConstruct pnnx_1233 6 1 3082 3092 3053 3094 20346 3090 3095 prim::Constant pnnx_1235 0 1 20347 value=0 prim::Constant pnnx_1236 0 1 20348 value=1 prim::Constant pnnx_1237 0 1 20349 
value=3 prim::Constant pnnx_1238 0 1 20350 value=2 prim::ListConstruct pnnx_1239 6 1 20347 20348 20349 20350 3054 3055 3097 Tensor.view Tensor.view_1029 2 1 x0.13 3095 x1.13 $input=x0.13 $shape=3095 #x0.13=(1,48,48,192)f32 #x1.13=(1,6,8,6,8,192)f32 prim::Constant pnnx_1243 0 1 20352 value=8 prim::Constant pnnx_1244 0 1 20353 value=8 prim::ListConstruct pnnx_1245 4 1 3056 20352 20353 3089 3100 torch.permute torch.permute_2548 2 1 x1.13 3097 3098 $input=x1.13 $dims=3097 #x1.13=(1,6,8,6,8,192)f32 #3098=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_17 1 1 3098 3099 memory_format=torch.contiguous_format $input=3098 #3098=(1,6,6,8,8,192)f32 #3099=(1,6,6,8,8,192)f32 prim::Constant pnnx_1247 0 1 20354 value=-1 prim::ListConstruct pnnx_1248 3 1 20354 3057 3072 3102 prim::Constant pnnx_1250 0 1 3104 value=1.767767e-01 prim::Constant pnnx_1251 0 1 3105 value=trunc prim::Constant pnnx_1252 0 1 3106 value=6 prim::Constant pnnx_1253 0 1 3107 value=0 prim::Constant pnnx_1254 0 1 3108 value=1 prim::Constant pnnx_1255 0 1 3109 value=2 prim::Constant pnnx_1256 0 1 3110 value=3 prim::Constant pnnx_1257 0 1 3111 value=6 prim::Constant pnnx_1258 0 1 3112 value=4 prim::Constant pnnx_1259 0 1 3113 value=-2 prim::Constant pnnx_1260 0 1 3114 value=-1 prim::Constant pnnx_1261 0 1 3115 value=64 pnnx.Attribute layers_dfe.0.residual_group.blocks.5.attn 0 1 relative_position_bias_table.13 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.13=(225,6)f32 pnnx.Attribute layers_dfe.0.residual_group.blocks.5.attn 0 1 relative_position_index.13 @relative_position_index=(64,64)i64 #relative_position_index.13=(64,64)i64 Tensor.view Tensor.view_1030 2 1 3099 3100 x_windows.13 $input=3099 $shape=3100 #3099=(1,6,6,8,8,192)f32 #x_windows.13=(36,8,8,192)f32 Tensor.view Tensor.view_1031 2 1 x_windows.13 3102 x2.13 $input=x_windows.13 $shape=3102 #x_windows.13=(36,8,8,192)f32 #x2.13=(36,64,192)f32 aten::size pnnx_1262 2 1 x2.13 3107 3123 #x2.13=(36,64,192)f32 prim::NumToTensor pnnx_1263 1 1 3123 B_.13 aten::Int pnnx_1264 1 1 B_.13 3125 aten::Int pnnx_1265 1 1 B_.13 3126 aten::size pnnx_1266 2 1 x2.13 3108 3127 #x2.13=(36,64,192)f32 prim::NumToTensor pnnx_1267 1 1 3127 N.13 aten::Int pnnx_1268 1 1 N.13 3129 aten::Int pnnx_1269 1 1 N.13 3130 aten::Int pnnx_1270 1 1 N.13 3131 aten::Int pnnx_1271 1 1 N.13 3132 aten::Int pnnx_1272 1 1 N.13 3133 aten::Int pnnx_1273 1 1 N.13 3134 aten::size pnnx_1274 2 1 x2.13 3109 3135 #x2.13=(36,64,192)f32 prim::NumToTensor pnnx_1275 1 1 3135 C.33 aten::Int pnnx_1276 1 1 C.33 3137 nn.Linear layers_dfe.0.residual_group.blocks.5.attn.qkv 1 1 x2.13 3138 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.13=(36,64,192)f32 #3138=(36,64,576)f32 aten::div pnnx_1277 3 1 C.33 3106 3105 3139 aten::Int pnnx_1278 1 1 3139 3140 prim::ListConstruct pnnx_1279 5 1 3126 3134 3110 3111 3140 3141 prim::Constant pnnx_1281 0 1 20355 value=2 prim::Constant pnnx_1282 0 1 20356 value=0 prim::Constant pnnx_1283 0 1 20357 value=3 prim::Constant pnnx_1284 0 1 20358 value=1 prim::ListConstruct pnnx_1285 5 1 20355 20356 20357 20358 3112 3143 Tensor.reshape Tensor.reshape_444 2 1 3138 3141 3142 $input=3138 $shape=3141 #3138=(36,64,576)f32 #3142=(36,64,3,6,32)f32 prim::Constant pnnx_1287 0 1 20359 value=0 prim::Constant pnnx_1288 0 1 20360 value=0 prim::Constant pnnx_1290 0 1 20361 value=0 prim::Constant pnnx_1291 0 1 20362 value=1 prim::Constant pnnx_1293 0 1 20363 value=0 prim::Constant pnnx_1294 0 1 20364 value=2 torch.permute torch.permute_2549 2 1 3142 3143 qkv0.13 
$input=3142 $dims=3143 #3142=(36,64,3,6,32)f32 #qkv0.13=(3,36,6,64,32)f32 Tensor.select Tensor.select_665 3 1 qkv0.13 20359 20360 q.13 $input=qkv0.13 $dim=20359 $index=20360 #qkv0.13=(3,36,6,64,32)f32 #q.13=(36,6,64,32)f32 aten::mul pnnx_1296 2 1 q.13 3104 q0.13 #q.13=(36,6,64,32)f32 #q0.13=(36,6,64,32)f32 Tensor.select Tensor.select_666 3 1 qkv0.13 20361 20362 k.13 $input=qkv0.13 $dim=20361 $index=20362 #qkv0.13=(3,36,6,64,32)f32 #k.13=(36,6,64,32)f32 prim::Constant pnnx_1299 0 1 20365 value=-1 prim::ListConstruct pnnx_1300 1 1 20365 3151 Tensor.view Tensor.view_1032 2 1 relative_position_index.13 3151 3152 $input=relative_position_index.13 $shape=3151 #relative_position_index.13=(64,64)i64 #3152=(4096)i64 prim::ListConstruct pnnx_1302 1 1 3152 3153 #3152=(4096)i64 prim::Constant pnnx_1304 0 1 20366 value=64 prim::Constant pnnx_1305 0 1 20367 value=-1 prim::ListConstruct pnnx_1306 3 1 3115 20366 20367 3155 Tensor.index Tensor.index_331 2 1 relative_position_bias_table.13 3153 3154 $input=relative_position_bias_table.13 $expr=3153 #relative_position_bias_table.13=(225,6)f32 #3154=(4096,6)f32 prim::Constant pnnx_1308 0 1 20368 value=2 prim::Constant pnnx_1309 0 1 20369 value=0 prim::Constant pnnx_1310 0 1 20370 value=1 prim::ListConstruct pnnx_1311 3 1 20368 20369 20370 3157 Tensor.view Tensor.view_1033 2 1 3154 3155 relative_position_bias.13 $input=3154 $shape=3155 #3154=(4096,6)f32 #relative_position_bias.13=(64,64,6)f32 prim::Constant pnnx_1315 0 1 20372 value=0 torch.permute torch.permute_2550 2 1 relative_position_bias.13 3157 3158 $input=relative_position_bias.13 $dims=3157 #relative_position_bias.13=(64,64,6)f32 #3158=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_18 1 1 3158 relative_position_bias0.13 memory_format=torch.contiguous_format $input=3158 #3158=(6,64,64)f32 #relative_position_bias0.13=(6,64,64)f32 prim::Constant pnnx_1317 0 1 20373 value=1 torch.transpose torch.transpose_2971 3 1 k.13 3113 3114 3149 $input=k.13 $dim0=3113 $dim1=3114 #k.13=(36,6,64,32)f32 #3149=(36,6,32,64)f32 torch.matmul torch.matmul_2214 2 1 q0.13 3149 attn.27 $input=q0.13 $other=3149 #q0.13=(36,6,64,32)f32 #3149=(36,6,32,64)f32 #attn.27=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3227 2 1 relative_position_bias0.13 20372 3160 $input=relative_position_bias0.13 $dim=20372 #relative_position_bias0.13=(6,64,64)f32 #3160=(1,6,64,64)f32 aten::add pnnx_1318 3 1 attn.27 3160 20373 attn0.7 #attn.27=(36,6,64,64)f32 #3160=(1,6,64,64)f32 #attn0.7=(36,6,64,64)f32 prim::Constant pnnx_1319 0 1 20374 value=0 aten::size pnnx_1320 2 1 attn_mask.7 20374 3162 #attn_mask.7=(36,64,64)f32 prim::NumToTensor pnnx_1321 1 1 3162 other.7 aten::Int pnnx_1322 1 1 other.7 3164 prim::Constant pnnx_1323 0 1 20375 value=trunc aten::div pnnx_1324 3 1 B_.13 other.7 20375 3165 aten::Int pnnx_1325 1 1 3165 3166 prim::Constant pnnx_1326 0 1 20376 value=6 prim::ListConstruct pnnx_1327 5 1 3166 3164 20376 3133 3132 3167 prim::Constant pnnx_1329 0 1 20377 value=1 prim::Constant pnnx_1331 0 1 20378 value=0 prim::Constant pnnx_1333 0 1 20379 value=1 Tensor.view Tensor.view_1034 2 1 attn0.7 3167 3168 $input=attn0.7 $shape=3167 #attn0.7=(36,6,64,64)f32 #3168=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3228 2 1 attn_mask.7 20377 3169 $input=attn_mask.7 $dim=20377 #attn_mask.7=(36,64,64)f32 #3169=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3229 2 1 3169 20378 3170 $input=3169 $dim=20378 #3169=(36,1,64,64)f32 #3170=(1,36,1,64,64)f32 aten::add pnnx_1334 3 1 3168 3170 20379 attn1.7 #3168=(1,36,6,64,64)f32 #3170=(1,36,1,64,64)f32 
#attn1.7=(1,36,6,64,64)f32 prim::Constant pnnx_1335 0 1 20380 value=-1 prim::Constant pnnx_1336 0 1 20381 value=6 prim::ListConstruct pnnx_1337 4 1 20380 20381 3131 3130 3172 Tensor.view Tensor.view_1035 2 1 attn1.7 3172 input.31 $input=attn1.7 $shape=3172 #attn1.7=(1,36,6,64,64)f32 #input.31=(36,6,64,64)f32 nn.Softmax layers_dfe.0.residual_group.blocks.5.attn.softmax 1 1 input.31 3174 dim=-1 #input.31=(36,6,64,64)f32 #3174=(36,6,64,64)f32 nn.Dropout layers_dfe.0.residual_group.blocks.5.attn.attn_drop 1 1 3174 3175 #3174=(36,6,64,64)f32 #3175=(36,6,64,64)f32 Tensor.select Tensor.select_667 3 1 qkv0.13 20363 20364 v.13 $input=qkv0.13 $dim=20363 $index=20364 #qkv0.13=(3,36,6,64,32)f32 #v.13=(36,6,64,32)f32 prim::Constant pnnx_1340 0 1 20382 value=1 prim::Constant pnnx_1341 0 1 20383 value=2 torch.matmul torch.matmul_2215 2 1 3175 v.13 3176 $input=3175 $other=v.13 #3175=(36,6,64,64)f32 #v.13=(36,6,64,32)f32 #3176=(36,6,64,32)f32 prim::ListConstruct pnnx_1343 3 1 3125 3129 3137 3178 torch.transpose torch.transpose_2972 3 1 3176 20382 20383 3177 $input=3176 $dim0=20382 $dim1=20383 #3176=(36,6,64,32)f32 #3177=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_445 2 1 3177 3178 input0.15 $input=3177 $shape=3178 #3177=(36,64,6,32)f32 #input0.15=(36,64,192)f32 nn.Linear layers_dfe.0.residual_group.blocks.5.attn.proj 1 1 input0.15 3180 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.15=(36,64,192)f32 #3180=(36,64,192)f32 nn.Dropout layers_dfe.0.residual_group.blocks.5.attn.proj_drop 1 1 3180 3181 #3180=(36,64,192)f32 #3181=(36,64,192)f32 prim::Constant pnnx_1345 0 1 20384 value=-1 prim::Constant pnnx_1346 0 1 20385 value=8 prim::Constant pnnx_1347 0 1 20386 value=8 prim::ListConstruct pnnx_1348 4 1 20384 20385 20386 3071 3182 prim::Constant pnnx_1350 0 1 20387 value=8 prim::Constant pnnx_1351 0 1 20388 value=trunc aten::div pnnx_1352 3 1 H.1 20387 20388 3184 aten::Int pnnx_1353 1 1 3184 3185 prim::Constant pnnx_1354 0 1 20389 value=8 prim::Constant pnnx_1355 0 1 20390 value=trunc aten::div pnnx_1356 3 1 W.1 20389 20390 3186 aten::Int pnnx_1357 1 1 3186 3187 prim::Constant pnnx_1358 0 1 20391 value=1 prim::Constant pnnx_1359 0 1 20392 value=8 prim::Constant pnnx_1360 0 1 20393 value=8 prim::Constant pnnx_1361 0 1 20394 value=-1 prim::ListConstruct pnnx_1362 6 1 20391 3185 3187 20392 20393 20394 3188 prim::Constant pnnx_1364 0 1 20395 value=0 prim::Constant pnnx_1365 0 1 20396 value=1 prim::Constant pnnx_1366 0 1 20397 value=3 prim::Constant pnnx_1367 0 1 20398 value=2 prim::Constant pnnx_1368 0 1 20399 value=4 prim::Constant pnnx_1369 0 1 20400 value=5 prim::ListConstruct pnnx_1370 6 1 20395 20396 20397 20398 20399 20400 3190 Tensor.view Tensor.view_1036 2 1 3181 3182 windows.13 $input=3181 $shape=3182 #3181=(36,64,192)f32 #windows.13=(36,8,8,192)f32 Tensor.view Tensor.view_1037 2 1 windows.13 3188 x3.13 $input=windows.13 $shape=3188 #windows.13=(36,8,8,192)f32 #x3.13=(1,6,6,8,8,192)f32 prim::Constant pnnx_1374 0 1 20402 value=1 prim::Constant pnnx_1375 0 1 20403 value=-1 prim::ListConstruct pnnx_1376 4 1 20402 364 604 20403 3193 torch.permute torch.permute_2551 2 1 x3.13 3190 3191 $input=x3.13 $dims=3190 #x3.13=(1,6,6,8,8,192)f32 #3191=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_19 1 1 3191 3192 memory_format=torch.contiguous_format $input=3191 #3191=(1,6,8,6,8,192)f32 #3192=(1,6,8,6,8,192)f32 prim::Constant pnnx_1378 0 1 20404 value=4 prim::Constant pnnx_1379 0 1 20405 value=4 prim::ListConstruct pnnx_1380 2 1 20404 20405 3195 prim::Constant pnnx_1381 0 
1 20406 value=1 prim::Constant pnnx_1382 0 1 20407 value=2 prim::ListConstruct pnnx_1383 2 1 20406 20407 3196 Tensor.view Tensor.view_1038 2 1 3192 3193 shifted_x.7 $input=3192 $shape=3193 #3192=(1,6,8,6,8,192)f32 #shifted_x.7=(1,48,48,192)f32 aten::mul pnnx_1385 2 1 H.1 W.1 3198 aten::Int pnnx_1386 1 1 3198 3199 prim::ListConstruct pnnx_1387 3 1 3066 3199 3070 3200 prim::Constant pnnx_1389 0 1 3202 value=None prim::Constant pnnx_1390 0 1 20408 value=1 torch.roll torch.roll_2425 3 1 shifted_x.7 3195 3196 x4.13 $input=shifted_x.7 $shifts=3195 $dims=3196 #shifted_x.7=(1,48,48,192)f32 #x4.13=(1,48,48,192)f32 Tensor.view Tensor.view_1039 2 1 x4.13 3200 x5.7 $input=x4.13 $shape=3200 #x4.13=(1,48,48,192)f32 #x5.7=(1,2304,192)f32 aten::add pnnx_1391 3 1 3045 x5.7 20408 input.33 #3045=(1,2304,192)f32 #x5.7=(1,2304,192)f32 #input.33=(1,2304,192)f32 nn.LayerNorm layers_dfe.0.residual_group.blocks.5.norm2 1 1 input.33 3204 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.33=(1,2304,192)f32 #3204=(1,2304,192)f32 nn.Linear layers_dfe.0.residual_group.blocks.5.mlp.fc1 1 1 3204 3209 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #3204=(1,2304,192)f32 #3209=(1,2304,384)f32 nn.GELU layers_dfe.0.residual_group.blocks.5.mlp.act 1 1 3209 3210 #3209=(1,2304,384)f32 #3210=(1,2304,384)f32 nn.Dropout layers_dfe.0.residual_group.blocks.5.mlp.drop 1 1 3210 3211 #3210=(1,2304,384)f32 #3211=(1,2304,384)f32 nn.Linear layers_dfe.0.residual_group.blocks.5.mlp.fc2 1 1 3211 3212 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #3211=(1,2304,384)f32 #3212=(1,2304,192)f32 nn.Dropout layers_dfe.0.residual_group.blocks.5.mlp.drop 1 1 3212 3213 #3212=(1,2304,192)f32 #3213=(1,2304,192)f32 prim::Constant pnnx_1392 0 1 3214 value=None prim::Constant pnnx_1393 0 1 20409 value=1 aten::add pnnx_1394 3 1 input.33 3213 20409 3215 #input.33=(1,2304,192)f32 #3213=(1,2304,192)f32 #3215=(1,2304,192)f32 prim::Constant pnnx_1395 0 1 3216 value=0 prim::Constant pnnx_1396 0 1 3217 value=1 prim::Constant pnnx_1397 0 1 3218 value=2 prim::Constant pnnx_1398 0 1 3219 value=192 aten::size pnnx_1399 2 1 3215 3216 3220 #3215=(1,2304,192)f32 prim::NumToTensor pnnx_1400 1 1 3220 B.17 aten::Int pnnx_1401 1 1 B.17 3222 prim::ListConstruct pnnx_1403 4 1 3222 3219 361 601 3224 torch.transpose torch.transpose_2973 3 1 3215 3217 3218 3223 $input=3215 $dim0=3217 $dim1=3218 #3215=(1,2304,192)f32 #3223=(1,192,2304)f32 Tensor.view Tensor.view_1040 2 1 3223 3224 input.35 $input=3223 $shape=3224 #3223=(1,192,2304)f32 #input.35=(1,192,48,48)f32 nn.Conv2d layers_dfe.0.conv 1 1 input.35 3226 bias=True dilation=(1,1) groups=1 in_channels=192 kernel_size=(3,3) out_channels=192 padding=(1,1) padding_mode=zeros stride=(1,1) @bias=(192)f32 @weight=(192,192,3,3)f32 #input.35=(1,192,48,48)f32 #3226=(1,192,48,48)f32 prim::Constant pnnx_1405 0 1 3227 value=-1 prim::Constant pnnx_1406 0 1 3228 value=2 prim::Constant pnnx_1407 0 1 3229 value=1 prim::Constant pnnx_1409 0 1 20410 value=2 torch.flatten torch.flatten_2184 3 1 3226 3228 3227 3230 $input=3226 $start_dim=3228 $end_dim=3227 #3226=(1,192,48,48)f32 #3230=(1,192,2304)f32 torch.transpose torch.transpose_2974 3 1 3230 3229 20410 3231 $input=3230 $dim0=3229 $dim1=20410 #3230=(1,192,2304)f32 #3231=(1,2304,192)f32 aten::add pnnx_1411 3 1 3231 642 2251 3232 #3231=(1,2304,192)f32 #642=(1,2304,192)f32 #3232=(1,2304,192)f32 prim::Constant pnnx_1412 0 1 3233 value=1 prim::Constant pnnx_1413 0 1 3250 value=trunc 
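The transpose → view to (1,192,48,48) → 3×3 layers_dfe.0.conv → flatten → transpose → aten::add sequence just above is the tail of the residual group: the token sequence is folded back into a feature map, convolved, unfolded, and added to a tensor saved earlier in the graph (642), which appears to be the group's input. A rough PyTorch sketch under those shapes (variable names are illustrative, not taken from the graph):

```python
import torch
import torch.nn as nn

# Shapes taken from the graph: (1, 2304, 192) tokens over a 48x48 grid.
B, H, W, C = 1, 48, 48, 192
conv = nn.Conv2d(C, C, kernel_size=3, padding=1)   # corresponds to layers_dfe.0.conv

tokens = torch.randn(B, H * W, C)       # output of the last block in the group (3215)
group_input = torch.randn(B, H * W, C)  # tensor added back at the end (642 in the graph)

x = tokens.transpose(1, 2).view(B, C, H, W)   # (1, 192, 48, 48)
x = conv(x)                                   # 3x3 conv, spatial size preserved
x = x.flatten(2).transpose(1, 2)              # back to (1, 2304, 192)
out = x + group_input                         # residual add feeding layers_dfe.1
```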
prim::Constant pnnx_1414 0 1 3251 value=8 prim::Constant pnnx_1415 0 1 3252 value=0 prim::Constant pnnx_1416 0 1 3253 value=2 prim::Constant pnnx_1417 0 1 3254 value=1 prim::Constant pnnx_1418 0 1 3255 value=3 prim::Constant pnnx_1419 0 1 3256 value=8 prim::Constant pnnx_1420 0 1 3257 value=4 prim::Constant pnnx_1421 0 1 3258 value=5 prim::Constant pnnx_1422 0 1 3259 value=-1 prim::Constant pnnx_1423 0 1 3260 value=64 aten::size pnnx_1424 2 1 3232 3252 3266 #3232=(1,2304,192)f32 prim::NumToTensor pnnx_1425 1 1 3266 B.19 aten::Int pnnx_1426 1 1 B.19 3268 aten::Int pnnx_1427 1 1 B.19 3269 aten::size pnnx_1428 2 1 3232 3253 3270 #3232=(1,2304,192)f32 prim::NumToTensor pnnx_1429 1 1 3270 C.35 aten::Int pnnx_1430 1 1 C.35 3272 aten::Int pnnx_1431 1 1 C.35 3273 aten::Int pnnx_1432 1 1 C.35 3274 aten::Int pnnx_1433 1 1 C.35 3275 nn.LayerNorm layers_dfe.1.residual_group.blocks.0.norm1 1 1 3232 3276 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #3232=(1,2304,192)f32 #3276=(1,2304,192)f32 prim::ListConstruct pnnx_1434 4 1 3269 358 598 3275 3277 prim::Constant pnnx_1436 0 1 20411 value=0 Tensor.view Tensor.view_1041 2 1 3276 3277 x.15 $input=3276 $shape=3277 #3276=(1,2304,192)f32 #x.15=(1,48,48,192)f32 aten::size pnnx_1437 2 1 x.15 20411 3279 #x.15=(1,48,48,192)f32 prim::NumToTensor pnnx_1438 1 1 3279 B0.15 aten::Int pnnx_1439 1 1 B0.15 3281 aten::size pnnx_1440 2 1 x.15 3254 3282 #x.15=(1,48,48,192)f32 prim::NumToTensor pnnx_1441 1 1 3282 3283 prim::Constant pnnx_1442 0 1 20412 value=2 aten::size pnnx_1443 2 1 x.15 20412 3284 #x.15=(1,48,48,192)f32 prim::NumToTensor pnnx_1444 1 1 3284 3285 aten::size pnnx_1445 2 1 x.15 3255 3286 #x.15=(1,48,48,192)f32 prim::NumToTensor pnnx_1446 1 1 3286 C0.15 aten::Int pnnx_1447 1 1 C0.15 3288 aten::Int pnnx_1448 1 1 C0.15 3289 aten::div pnnx_1449 3 1 3283 3251 3250 3290 aten::Int pnnx_1450 1 1 3290 3291 prim::Constant pnnx_1451 0 1 20413 value=8 prim::Constant pnnx_1452 0 1 20414 value=trunc aten::div pnnx_1453 3 1 3285 20413 20414 3292 aten::Int pnnx_1454 1 1 3292 3293 prim::Constant pnnx_1455 0 1 20415 value=8 prim::ListConstruct pnnx_1456 6 1 3281 3291 3256 3293 20415 3289 3294 prim::Constant pnnx_1458 0 1 20416 value=0 prim::Constant pnnx_1459 0 1 20417 value=1 prim::Constant pnnx_1460 0 1 20418 value=3 prim::Constant pnnx_1461 0 1 20419 value=2 prim::ListConstruct pnnx_1462 6 1 20416 20417 20418 20419 3257 3258 3296 Tensor.view Tensor.view_1042 2 1 x.15 3294 x0.15 $input=x.15 $shape=3294 #x.15=(1,48,48,192)f32 #x0.15=(1,6,8,6,8,192)f32 prim::Constant pnnx_1466 0 1 20421 value=8 prim::Constant pnnx_1467 0 1 20422 value=8 prim::ListConstruct pnnx_1468 4 1 3259 20421 20422 3288 3299 torch.permute torch.permute_2552 2 1 x0.15 3296 3297 $input=x0.15 $dims=3296 #x0.15=(1,6,8,6,8,192)f32 #3297=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_20 1 1 3297 3298 memory_format=torch.contiguous_format $input=3297 #3297=(1,6,6,8,8,192)f32 #3298=(1,6,6,8,8,192)f32 prim::Constant pnnx_1470 0 1 20423 value=-1 prim::ListConstruct pnnx_1471 3 1 20423 3260 3274 3301 prim::Constant pnnx_1473 0 1 3303 value=1.767767e-01 prim::Constant pnnx_1474 0 1 3304 value=trunc prim::Constant pnnx_1475 0 1 3305 value=6 prim::Constant pnnx_1476 0 1 3306 value=0 prim::Constant pnnx_1477 0 1 3307 value=1 prim::Constant pnnx_1478 0 1 3308 value=2 prim::Constant pnnx_1479 0 1 3309 value=3 prim::Constant pnnx_1480 0 1 3310 value=6 prim::Constant pnnx_1481 0 1 3311 value=4 prim::Constant pnnx_1482 0 1 3312 value=-2 prim::Constant pnnx_1483 0 1 
3313 value=-1 prim::Constant pnnx_1484 0 1 3314 value=64 pnnx.Attribute layers_dfe.1.residual_group.blocks.0.attn 0 1 relative_position_bias_table.15 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.15=(225,6)f32 pnnx.Attribute layers_dfe.1.residual_group.blocks.0.attn 0 1 relative_position_index.15 @relative_position_index=(64,64)i64 #relative_position_index.15=(64,64)i64 Tensor.view Tensor.view_1043 2 1 3298 3299 x_windows.15 $input=3298 $shape=3299 #3298=(1,6,6,8,8,192)f32 #x_windows.15=(36,8,8,192)f32 Tensor.view Tensor.view_1044 2 1 x_windows.15 3301 x1.15 $input=x_windows.15 $shape=3301 #x_windows.15=(36,8,8,192)f32 #x1.15=(36,64,192)f32 aten::size pnnx_1485 2 1 x1.15 3306 3322 #x1.15=(36,64,192)f32 prim::NumToTensor pnnx_1486 1 1 3322 B_.15 aten::Int pnnx_1487 1 1 B_.15 3324 aten::Int pnnx_1488 1 1 B_.15 3325 aten::size pnnx_1489 2 1 x1.15 3307 3326 #x1.15=(36,64,192)f32 prim::NumToTensor pnnx_1490 1 1 3326 N.15 aten::Int pnnx_1491 1 1 N.15 3328 aten::Int pnnx_1492 1 1 N.15 3329 aten::size pnnx_1493 2 1 x1.15 3308 3330 #x1.15=(36,64,192)f32 prim::NumToTensor pnnx_1494 1 1 3330 C.37 aten::Int pnnx_1495 1 1 C.37 3332 nn.Linear layers_dfe.1.residual_group.blocks.0.attn.qkv 1 1 x1.15 3333 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.15=(36,64,192)f32 #3333=(36,64,576)f32 aten::div pnnx_1496 3 1 C.37 3305 3304 3334 aten::Int pnnx_1497 1 1 3334 3335 prim::ListConstruct pnnx_1498 5 1 3325 3329 3309 3310 3335 3336 prim::Constant pnnx_1500 0 1 20424 value=2 prim::Constant pnnx_1501 0 1 20425 value=0 prim::Constant pnnx_1502 0 1 20426 value=3 prim::Constant pnnx_1503 0 1 20427 value=1 prim::ListConstruct pnnx_1504 5 1 20424 20425 20426 20427 3311 3338 Tensor.reshape Tensor.reshape_446 2 1 3333 3336 3337 $input=3333 $shape=3336 #3333=(36,64,576)f32 #3337=(36,64,3,6,32)f32 prim::Constant pnnx_1506 0 1 20428 value=0 prim::Constant pnnx_1507 0 1 20429 value=0 prim::Constant pnnx_1509 0 1 20430 value=0 prim::Constant pnnx_1510 0 1 20431 value=1 prim::Constant pnnx_1512 0 1 20432 value=0 prim::Constant pnnx_1513 0 1 20433 value=2 torch.permute torch.permute_2553 2 1 3337 3338 qkv0.15 $input=3337 $dims=3338 #3337=(36,64,3,6,32)f32 #qkv0.15=(3,36,6,64,32)f32 Tensor.select Tensor.select_668 3 1 qkv0.15 20428 20429 q.15 $input=qkv0.15 $dim=20428 $index=20429 #qkv0.15=(3,36,6,64,32)f32 #q.15=(36,6,64,32)f32 aten::mul pnnx_1515 2 1 q.15 3303 q0.15 #q.15=(36,6,64,32)f32 #q0.15=(36,6,64,32)f32 Tensor.select Tensor.select_669 3 1 qkv0.15 20430 20431 k.15 $input=qkv0.15 $dim=20430 $index=20431 #qkv0.15=(3,36,6,64,32)f32 #k.15=(36,6,64,32)f32 prim::Constant pnnx_1518 0 1 20434 value=-1 prim::ListConstruct pnnx_1519 1 1 20434 3346 Tensor.view Tensor.view_1045 2 1 relative_position_index.15 3346 3347 $input=relative_position_index.15 $shape=3346 #relative_position_index.15=(64,64)i64 #3347=(4096)i64 prim::ListConstruct pnnx_1521 1 1 3347 3348 #3347=(4096)i64 prim::Constant pnnx_1523 0 1 20435 value=64 prim::Constant pnnx_1524 0 1 20436 value=-1 prim::ListConstruct pnnx_1525 3 1 3314 20435 20436 3350 Tensor.index Tensor.index_332 2 1 relative_position_bias_table.15 3348 3349 $input=relative_position_bias_table.15 $expr=3348 #relative_position_bias_table.15=(225,6)f32 #3349=(4096,6)f32 prim::Constant pnnx_1527 0 1 20437 value=2 prim::Constant pnnx_1528 0 1 20438 value=0 prim::Constant pnnx_1529 0 1 20439 value=1 prim::ListConstruct pnnx_1530 3 1 20437 20438 20439 3352 Tensor.view Tensor.view_1046 2 1 3349 3350 relative_position_bias.15 $input=3349 
$shape=3350 #3349=(4096,6)f32 #relative_position_bias.15=(64,64,6)f32 prim::Constant pnnx_1534 0 1 20441 value=0 torch.permute torch.permute_2554 2 1 relative_position_bias.15 3352 3353 $input=relative_position_bias.15 $dims=3352 #relative_position_bias.15=(64,64,6)f32 #3353=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_21 1 1 3353 relative_position_bias0.15 memory_format=torch.contiguous_format $input=3353 #3353=(6,64,64)f32 #relative_position_bias0.15=(6,64,64)f32 prim::Constant pnnx_1536 0 1 20442 value=1 torch.transpose torch.transpose_2975 3 1 k.15 3312 3313 3344 $input=k.15 $dim0=3312 $dim1=3313 #k.15=(36,6,64,32)f32 #3344=(36,6,32,64)f32 torch.matmul torch.matmul_2216 2 1 q0.15 3344 attn.31 $input=q0.15 $other=3344 #q0.15=(36,6,64,32)f32 #3344=(36,6,32,64)f32 #attn.31=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3230 2 1 relative_position_bias0.15 20441 3355 $input=relative_position_bias0.15 $dim=20441 #relative_position_bias0.15=(6,64,64)f32 #3355=(1,6,64,64)f32 aten::add pnnx_1537 3 1 attn.31 3355 20442 input.37 #attn.31=(36,6,64,64)f32 #3355=(1,6,64,64)f32 #input.37=(36,6,64,64)f32 nn.Softmax layers_dfe.1.residual_group.blocks.0.attn.softmax 1 1 input.37 3357 dim=-1 #input.37=(36,6,64,64)f32 #3357=(36,6,64,64)f32 nn.Dropout layers_dfe.1.residual_group.blocks.0.attn.attn_drop 1 1 3357 3358 #3357=(36,6,64,64)f32 #3358=(36,6,64,64)f32 Tensor.select Tensor.select_670 3 1 qkv0.15 20432 20433 v.15 $input=qkv0.15 $dim=20432 $index=20433 #qkv0.15=(3,36,6,64,32)f32 #v.15=(36,6,64,32)f32 prim::Constant pnnx_1539 0 1 20443 value=1 prim::Constant pnnx_1540 0 1 20444 value=2 torch.matmul torch.matmul_2217 2 1 3358 v.15 3359 $input=3358 $other=v.15 #3358=(36,6,64,64)f32 #v.15=(36,6,64,32)f32 #3359=(36,6,64,32)f32 prim::ListConstruct pnnx_1542 3 1 3324 3328 3332 3361 torch.transpose torch.transpose_2976 3 1 3359 20443 20444 3360 $input=3359 $dim0=20443 $dim1=20444 #3359=(36,6,64,32)f32 #3360=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_447 2 1 3360 3361 input0.17 $input=3360 $shape=3361 #3360=(36,64,6,32)f32 #input0.17=(36,64,192)f32 nn.Linear layers_dfe.1.residual_group.blocks.0.attn.proj 1 1 input0.17 3363 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.17=(36,64,192)f32 #3363=(36,64,192)f32 nn.Dropout layers_dfe.1.residual_group.blocks.0.attn.proj_drop 1 1 3363 3364 #3363=(36,64,192)f32 #3364=(36,64,192)f32 prim::Constant pnnx_1544 0 1 20445 value=-1 prim::Constant pnnx_1545 0 1 20446 value=8 prim::Constant pnnx_1546 0 1 20447 value=8 prim::ListConstruct pnnx_1547 4 1 20445 20446 20447 3273 3365 prim::Constant pnnx_1549 0 1 20448 value=8 prim::Constant pnnx_1550 0 1 20449 value=trunc aten::div pnnx_1551 3 1 H.1 20448 20449 3367 aten::Int pnnx_1552 1 1 3367 3368 prim::Constant pnnx_1553 0 1 20450 value=8 prim::Constant pnnx_1554 0 1 20451 value=trunc aten::div pnnx_1555 3 1 W.1 20450 20451 3369 aten::Int pnnx_1556 1 1 3369 3370 prim::Constant pnnx_1557 0 1 20452 value=1 prim::Constant pnnx_1558 0 1 20453 value=8 prim::Constant pnnx_1559 0 1 20454 value=8 prim::Constant pnnx_1560 0 1 20455 value=-1 prim::ListConstruct pnnx_1561 6 1 20452 3368 3370 20453 20454 20455 3371 prim::Constant pnnx_1563 0 1 20456 value=0 prim::Constant pnnx_1564 0 1 20457 value=1 prim::Constant pnnx_1565 0 1 20458 value=3 prim::Constant pnnx_1566 0 1 20459 value=2 prim::Constant pnnx_1567 0 1 20460 value=4 prim::Constant pnnx_1568 0 1 20461 value=5 prim::ListConstruct pnnx_1569 6 1 20456 20457 20458 20459 20460 20461 3373 Tensor.view Tensor.view_1047 2 1 3364 3365 
windows.15 $input=3364 $shape=3365 #3364=(36,64,192)f32 #windows.15=(36,8,8,192)f32 Tensor.view Tensor.view_1048 2 1 windows.15 3371 x2.15 $input=windows.15 $shape=3371 #windows.15=(36,8,8,192)f32 #x2.15=(1,6,6,8,8,192)f32 prim::Constant pnnx_1573 0 1 20463 value=1 prim::Constant pnnx_1574 0 1 20464 value=-1 prim::ListConstruct pnnx_1575 4 1 20463 355 595 20464 3376 torch.permute torch.permute_2555 2 1 x2.15 3373 3374 $input=x2.15 $dims=3373 #x2.15=(1,6,6,8,8,192)f32 #3374=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_22 1 1 3374 3375 memory_format=torch.contiguous_format $input=3374 #3374=(1,6,8,6,8,192)f32 #3375=(1,6,8,6,8,192)f32 aten::mul pnnx_1577 2 1 H.1 W.1 3378 aten::Int pnnx_1578 1 1 3378 3379 prim::ListConstruct pnnx_1579 3 1 3268 3379 3272 3380 prim::Constant pnnx_1581 0 1 3382 value=None prim::Constant pnnx_1582 0 1 20465 value=1 Tensor.view Tensor.view_1049 2 1 3375 3376 x3.15 $input=3375 $shape=3376 #3375=(1,6,8,6,8,192)f32 #x3.15=(1,48,48,192)f32 Tensor.view Tensor.view_1050 2 1 x3.15 3380 x4.15 $input=x3.15 $shape=3380 #x3.15=(1,48,48,192)f32 #x4.15=(1,2304,192)f32 aten::add pnnx_1583 3 1 3232 x4.15 20465 input.39 #3232=(1,2304,192)f32 #x4.15=(1,2304,192)f32 #input.39=(1,2304,192)f32 nn.LayerNorm layers_dfe.1.residual_group.blocks.0.norm2 1 1 input.39 3384 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.39=(1,2304,192)f32 #3384=(1,2304,192)f32 nn.Linear layers_dfe.1.residual_group.blocks.0.mlp.fc1 1 1 3384 3389 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #3384=(1,2304,192)f32 #3389=(1,2304,384)f32 nn.GELU layers_dfe.1.residual_group.blocks.0.mlp.act 1 1 3389 3390 #3389=(1,2304,384)f32 #3390=(1,2304,384)f32 nn.Dropout layers_dfe.1.residual_group.blocks.0.mlp.drop 1 1 3390 3391 #3390=(1,2304,384)f32 #3391=(1,2304,384)f32 nn.Linear layers_dfe.1.residual_group.blocks.0.mlp.fc2 1 1 3391 3392 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #3391=(1,2304,384)f32 #3392=(1,2304,192)f32 nn.Dropout layers_dfe.1.residual_group.blocks.0.mlp.drop 1 1 3392 3393 #3392=(1,2304,192)f32 #3393=(1,2304,192)f32 prim::Constant pnnx_1584 0 1 3394 value=None prim::Constant pnnx_1585 0 1 20466 value=1 aten::add pnnx_1586 3 1 input.39 3393 20466 3395 #input.39=(1,2304,192)f32 #3393=(1,2304,192)f32 #3395=(1,2304,192)f32 prim::Constant pnnx_1587 0 1 3396 value=trunc prim::Constant pnnx_1588 0 1 3397 value=8 prim::Constant pnnx_1589 0 1 3398 value=0 prim::Constant pnnx_1590 0 1 3399 value=2 prim::Constant pnnx_1591 0 1 3400 value=-4 prim::Constant pnnx_1592 0 1 3401 value=1 prim::Constant pnnx_1593 0 1 3402 value=3 prim::Constant pnnx_1594 0 1 3403 value=8 prim::Constant pnnx_1595 0 1 3404 value=4 prim::Constant pnnx_1596 0 1 3405 value=5 prim::Constant pnnx_1597 0 1 3406 value=-1 prim::Constant pnnx_1598 0 1 3407 value=64 pnnx.Attribute layers_dfe.1.residual_group.blocks.1 0 1 attn_mask.9 @attn_mask=(36,64,64)f32 #attn_mask.9=(36,64,64)f32 aten::size pnnx_1599 2 1 3395 3398 3414 #3395=(1,2304,192)f32 prim::NumToTensor pnnx_1600 1 1 3414 B.21 aten::Int pnnx_1601 1 1 B.21 3416 aten::Int pnnx_1602 1 1 B.21 3417 aten::size pnnx_1603 2 1 3395 3399 3418 #3395=(1,2304,192)f32 prim::NumToTensor pnnx_1604 1 1 3418 C.39 aten::Int pnnx_1605 1 1 C.39 3420 aten::Int pnnx_1606 1 1 C.39 3421 aten::Int pnnx_1607 1 1 C.39 3422 aten::Int pnnx_1608 1 1 C.39 3423 nn.LayerNorm layers_dfe.1.residual_group.blocks.1.norm1 1 1 3395 3424 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) 
@bias=(192)f32 @weight=(192)f32 #3395=(1,2304,192)f32 #3424=(1,2304,192)f32 prim::ListConstruct pnnx_1609 4 1 3417 352 592 3423 3425 prim::Constant pnnx_1611 0 1 20467 value=-4 prim::ListConstruct pnnx_1612 2 1 3400 20467 3427 prim::Constant pnnx_1613 0 1 20468 value=2 prim::ListConstruct pnnx_1614 2 1 3401 20468 3428 Tensor.view Tensor.view_1051 2 1 3424 3425 x.17 $input=3424 $shape=3425 #3424=(1,2304,192)f32 #x.17=(1,48,48,192)f32 prim::Constant pnnx_1616 0 1 20469 value=0 torch.roll torch.roll_2426 3 1 x.17 3427 3428 x0.17 $input=x.17 $shifts=3427 $dims=3428 #x.17=(1,48,48,192)f32 #x0.17=(1,48,48,192)f32 aten::size pnnx_1617 2 1 x0.17 20469 3430 #x0.17=(1,48,48,192)f32 prim::NumToTensor pnnx_1618 1 1 3430 B0.17 aten::Int pnnx_1619 1 1 B0.17 3432 prim::Constant pnnx_1620 0 1 20470 value=1 aten::size pnnx_1621 2 1 x0.17 20470 3433 #x0.17=(1,48,48,192)f32 prim::NumToTensor pnnx_1622 1 1 3433 3434 prim::Constant pnnx_1623 0 1 20471 value=2 aten::size pnnx_1624 2 1 x0.17 20471 3435 #x0.17=(1,48,48,192)f32 prim::NumToTensor pnnx_1625 1 1 3435 3436 aten::size pnnx_1626 2 1 x0.17 3402 3437 #x0.17=(1,48,48,192)f32 prim::NumToTensor pnnx_1627 1 1 3437 C0.17 aten::Int pnnx_1628 1 1 C0.17 3439 aten::Int pnnx_1629 1 1 C0.17 3440 aten::div pnnx_1630 3 1 3434 3397 3396 3441 aten::Int pnnx_1631 1 1 3441 3442 prim::Constant pnnx_1632 0 1 20472 value=8 prim::Constant pnnx_1633 0 1 20473 value=trunc aten::div pnnx_1634 3 1 3436 20472 20473 3443 aten::Int pnnx_1635 1 1 3443 3444 prim::Constant pnnx_1636 0 1 20474 value=8 prim::ListConstruct pnnx_1637 6 1 3432 3442 3403 3444 20474 3440 3445 prim::Constant pnnx_1639 0 1 20475 value=0 prim::Constant pnnx_1640 0 1 20476 value=1 prim::Constant pnnx_1641 0 1 20477 value=3 prim::Constant pnnx_1642 0 1 20478 value=2 prim::ListConstruct pnnx_1643 6 1 20475 20476 20477 20478 3404 3405 3447 Tensor.view Tensor.view_1052 2 1 x0.17 3445 x1.17 $input=x0.17 $shape=3445 #x0.17=(1,48,48,192)f32 #x1.17=(1,6,8,6,8,192)f32 prim::Constant pnnx_1647 0 1 20480 value=8 prim::Constant pnnx_1648 0 1 20481 value=8 prim::ListConstruct pnnx_1649 4 1 3406 20480 20481 3439 3450 torch.permute torch.permute_2556 2 1 x1.17 3447 3448 $input=x1.17 $dims=3447 #x1.17=(1,6,8,6,8,192)f32 #3448=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_23 1 1 3448 3449 memory_format=torch.contiguous_format $input=3448 #3448=(1,6,6,8,8,192)f32 #3449=(1,6,6,8,8,192)f32 prim::Constant pnnx_1651 0 1 20482 value=-1 prim::ListConstruct pnnx_1652 3 1 20482 3407 3422 3452 prim::Constant pnnx_1654 0 1 3454 value=1.767767e-01 prim::Constant pnnx_1655 0 1 3455 value=trunc prim::Constant pnnx_1656 0 1 3456 value=6 prim::Constant pnnx_1657 0 1 3457 value=0 prim::Constant pnnx_1658 0 1 3458 value=1 prim::Constant pnnx_1659 0 1 3459 value=2 prim::Constant pnnx_1660 0 1 3460 value=3 prim::Constant pnnx_1661 0 1 3461 value=6 prim::Constant pnnx_1662 0 1 3462 value=4 prim::Constant pnnx_1663 0 1 3463 value=-2 prim::Constant pnnx_1664 0 1 3464 value=-1 prim::Constant pnnx_1665 0 1 3465 value=64 pnnx.Attribute layers_dfe.1.residual_group.blocks.1.attn 0 1 relative_position_bias_table.17 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.17=(225,6)f32 pnnx.Attribute layers_dfe.1.residual_group.blocks.1.attn 0 1 relative_position_index.17 @relative_position_index=(64,64)i64 #relative_position_index.17=(64,64)i64 Tensor.view Tensor.view_1053 2 1 3449 3450 x_windows.17 $input=3449 $shape=3450 #3449=(1,6,6,8,8,192)f32 #x_windows.17=(36,8,8,192)f32 Tensor.view Tensor.view_1054 2 1 x_windows.17 3452 x2.17 
$input=x_windows.17 $shape=3452 #x_windows.17=(36,8,8,192)f32 #x2.17=(36,64,192)f32 aten::size pnnx_1666 2 1 x2.17 3457 3473 #x2.17=(36,64,192)f32 prim::NumToTensor pnnx_1667 1 1 3473 B_.17 aten::Int pnnx_1668 1 1 B_.17 3475 aten::Int pnnx_1669 1 1 B_.17 3476 aten::size pnnx_1670 2 1 x2.17 3458 3477 #x2.17=(36,64,192)f32 prim::NumToTensor pnnx_1671 1 1 3477 N.17 aten::Int pnnx_1672 1 1 N.17 3479 aten::Int pnnx_1673 1 1 N.17 3480 aten::Int pnnx_1674 1 1 N.17 3481 aten::Int pnnx_1675 1 1 N.17 3482 aten::Int pnnx_1676 1 1 N.17 3483 aten::Int pnnx_1677 1 1 N.17 3484 aten::size pnnx_1678 2 1 x2.17 3459 3485 #x2.17=(36,64,192)f32 prim::NumToTensor pnnx_1679 1 1 3485 C.41 aten::Int pnnx_1680 1 1 C.41 3487 nn.Linear layers_dfe.1.residual_group.blocks.1.attn.qkv 1 1 x2.17 3488 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.17=(36,64,192)f32 #3488=(36,64,576)f32 aten::div pnnx_1681 3 1 C.41 3456 3455 3489 aten::Int pnnx_1682 1 1 3489 3490 prim::ListConstruct pnnx_1683 5 1 3476 3484 3460 3461 3490 3491 prim::Constant pnnx_1685 0 1 20483 value=2 prim::Constant pnnx_1686 0 1 20484 value=0 prim::Constant pnnx_1687 0 1 20485 value=3 prim::Constant pnnx_1688 0 1 20486 value=1 prim::ListConstruct pnnx_1689 5 1 20483 20484 20485 20486 3462 3493 Tensor.reshape Tensor.reshape_448 2 1 3488 3491 3492 $input=3488 $shape=3491 #3488=(36,64,576)f32 #3492=(36,64,3,6,32)f32 prim::Constant pnnx_1691 0 1 20487 value=0 prim::Constant pnnx_1692 0 1 20488 value=0 prim::Constant pnnx_1694 0 1 20489 value=0 prim::Constant pnnx_1695 0 1 20490 value=1 prim::Constant pnnx_1697 0 1 20491 value=0 prim::Constant pnnx_1698 0 1 20492 value=2 torch.permute torch.permute_2557 2 1 3492 3493 qkv0.17 $input=3492 $dims=3493 #3492=(36,64,3,6,32)f32 #qkv0.17=(3,36,6,64,32)f32 Tensor.select Tensor.select_671 3 1 qkv0.17 20487 20488 q.17 $input=qkv0.17 $dim=20487 $index=20488 #qkv0.17=(3,36,6,64,32)f32 #q.17=(36,6,64,32)f32 aten::mul pnnx_1700 2 1 q.17 3454 q0.17 #q.17=(36,6,64,32)f32 #q0.17=(36,6,64,32)f32 Tensor.select Tensor.select_672 3 1 qkv0.17 20489 20490 k.17 $input=qkv0.17 $dim=20489 $index=20490 #qkv0.17=(3,36,6,64,32)f32 #k.17=(36,6,64,32)f32 prim::Constant pnnx_1703 0 1 20493 value=-1 prim::ListConstruct pnnx_1704 1 1 20493 3501 Tensor.view Tensor.view_1055 2 1 relative_position_index.17 3501 3502 $input=relative_position_index.17 $shape=3501 #relative_position_index.17=(64,64)i64 #3502=(4096)i64 prim::ListConstruct pnnx_1706 1 1 3502 3503 #3502=(4096)i64 prim::Constant pnnx_1708 0 1 20494 value=64 prim::Constant pnnx_1709 0 1 20495 value=-1 prim::ListConstruct pnnx_1710 3 1 3465 20494 20495 3505 Tensor.index Tensor.index_333 2 1 relative_position_bias_table.17 3503 3504 $input=relative_position_bias_table.17 $expr=3503 #relative_position_bias_table.17=(225,6)f32 #3504=(4096,6)f32 prim::Constant pnnx_1712 0 1 20496 value=2 prim::Constant pnnx_1713 0 1 20497 value=0 prim::Constant pnnx_1714 0 1 20498 value=1 prim::ListConstruct pnnx_1715 3 1 20496 20497 20498 3507 Tensor.view Tensor.view_1056 2 1 3504 3505 relative_position_bias.17 $input=3504 $shape=3505 #3504=(4096,6)f32 #relative_position_bias.17=(64,64,6)f32 prim::Constant pnnx_1719 0 1 20500 value=0 torch.permute torch.permute_2558 2 1 relative_position_bias.17 3507 3508 $input=relative_position_bias.17 $dims=3507 #relative_position_bias.17=(64,64,6)f32 #3508=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_24 1 1 3508 relative_position_bias0.17 memory_format=torch.contiguous_format $input=3508 #3508=(6,64,64)f32 
#relative_position_bias0.17=(6,64,64)f32 prim::Constant pnnx_1721 0 1 20501 value=1 torch.transpose torch.transpose_2977 3 1 k.17 3463 3464 3499 $input=k.17 $dim0=3463 $dim1=3464 #k.17=(36,6,64,32)f32 #3499=(36,6,32,64)f32 torch.matmul torch.matmul_2218 2 1 q0.17 3499 attn.35 $input=q0.17 $other=3499 #q0.17=(36,6,64,32)f32 #3499=(36,6,32,64)f32 #attn.35=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3231 2 1 relative_position_bias0.17 20500 3510 $input=relative_position_bias0.17 $dim=20500 #relative_position_bias0.17=(6,64,64)f32 #3510=(1,6,64,64)f32 aten::add pnnx_1722 3 1 attn.35 3510 20501 attn0.9 #attn.35=(36,6,64,64)f32 #3510=(1,6,64,64)f32 #attn0.9=(36,6,64,64)f32 prim::Constant pnnx_1723 0 1 20502 value=0 aten::size pnnx_1724 2 1 attn_mask.9 20502 3512 #attn_mask.9=(36,64,64)f32 prim::NumToTensor pnnx_1725 1 1 3512 other.9 aten::Int pnnx_1726 1 1 other.9 3514 prim::Constant pnnx_1727 0 1 20503 value=trunc aten::div pnnx_1728 3 1 B_.17 other.9 20503 3515 aten::Int pnnx_1729 1 1 3515 3516 prim::Constant pnnx_1730 0 1 20504 value=6 prim::ListConstruct pnnx_1731 5 1 3516 3514 20504 3483 3482 3517 prim::Constant pnnx_1733 0 1 20505 value=1 prim::Constant pnnx_1735 0 1 20506 value=0 prim::Constant pnnx_1737 0 1 20507 value=1 Tensor.view Tensor.view_1057 2 1 attn0.9 3517 3518 $input=attn0.9 $shape=3517 #attn0.9=(36,6,64,64)f32 #3518=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3232 2 1 attn_mask.9 20505 3519 $input=attn_mask.9 $dim=20505 #attn_mask.9=(36,64,64)f32 #3519=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3233 2 1 3519 20506 3520 $input=3519 $dim=20506 #3519=(36,1,64,64)f32 #3520=(1,36,1,64,64)f32 aten::add pnnx_1738 3 1 3518 3520 20507 attn1.9 #3518=(1,36,6,64,64)f32 #3520=(1,36,1,64,64)f32 #attn1.9=(1,36,6,64,64)f32 prim::Constant pnnx_1739 0 1 20508 value=-1 prim::Constant pnnx_1740 0 1 20509 value=6 prim::ListConstruct pnnx_1741 4 1 20508 20509 3481 3480 3522 Tensor.view Tensor.view_1058 2 1 attn1.9 3522 input.41 $input=attn1.9 $shape=3522 #attn1.9=(1,36,6,64,64)f32 #input.41=(36,6,64,64)f32 nn.Softmax layers_dfe.1.residual_group.blocks.1.attn.softmax 1 1 input.41 3524 dim=-1 #input.41=(36,6,64,64)f32 #3524=(36,6,64,64)f32 nn.Dropout layers_dfe.1.residual_group.blocks.1.attn.attn_drop 1 1 3524 3525 #3524=(36,6,64,64)f32 #3525=(36,6,64,64)f32 Tensor.select Tensor.select_673 3 1 qkv0.17 20491 20492 v.17 $input=qkv0.17 $dim=20491 $index=20492 #qkv0.17=(3,36,6,64,32)f32 #v.17=(36,6,64,32)f32 prim::Constant pnnx_1744 0 1 20510 value=1 prim::Constant pnnx_1745 0 1 20511 value=2 torch.matmul torch.matmul_2219 2 1 3525 v.17 3526 $input=3525 $other=v.17 #3525=(36,6,64,64)f32 #v.17=(36,6,64,32)f32 #3526=(36,6,64,32)f32 prim::ListConstruct pnnx_1747 3 1 3475 3479 3487 3528 torch.transpose torch.transpose_2978 3 1 3526 20510 20511 3527 $input=3526 $dim0=20510 $dim1=20511 #3526=(36,6,64,32)f32 #3527=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_449 2 1 3527 3528 input0.19 $input=3527 $shape=3528 #3527=(36,64,6,32)f32 #input0.19=(36,64,192)f32 nn.Linear layers_dfe.1.residual_group.blocks.1.attn.proj 1 1 input0.19 3530 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.19=(36,64,192)f32 #3530=(36,64,192)f32 nn.Dropout layers_dfe.1.residual_group.blocks.1.attn.proj_drop 1 1 3530 3531 #3530=(36,64,192)f32 #3531=(36,64,192)f32 prim::Constant pnnx_1749 0 1 20512 value=-1 prim::Constant pnnx_1750 0 1 20513 value=8 prim::Constant pnnx_1751 0 1 20514 value=8 prim::ListConstruct pnnx_1752 4 1 20512 20513 20514 3421 3532 prim::Constant pnnx_1754 0 1 20515 
value=8 prim::Constant pnnx_1755 0 1 20516 value=trunc aten::div pnnx_1756 3 1 H.1 20515 20516 3534 aten::Int pnnx_1757 1 1 3534 3535 prim::Constant pnnx_1758 0 1 20517 value=8 prim::Constant pnnx_1759 0 1 20518 value=trunc aten::div pnnx_1760 3 1 W.1 20517 20518 3536 aten::Int pnnx_1761 1 1 3536 3537 prim::Constant pnnx_1762 0 1 20519 value=1 prim::Constant pnnx_1763 0 1 20520 value=8 prim::Constant pnnx_1764 0 1 20521 value=8 prim::Constant pnnx_1765 0 1 20522 value=-1 prim::ListConstruct pnnx_1766 6 1 20519 3535 3537 20520 20521 20522 3538 prim::Constant pnnx_1768 0 1 20523 value=0 prim::Constant pnnx_1769 0 1 20524 value=1 prim::Constant pnnx_1770 0 1 20525 value=3 prim::Constant pnnx_1771 0 1 20526 value=2 prim::Constant pnnx_1772 0 1 20527 value=4 prim::Constant pnnx_1773 0 1 20528 value=5 prim::ListConstruct pnnx_1774 6 1 20523 20524 20525 20526 20527 20528 3540 Tensor.view Tensor.view_1059 2 1 3531 3532 windows.17 $input=3531 $shape=3532 #3531=(36,64,192)f32 #windows.17=(36,8,8,192)f32 Tensor.view Tensor.view_1060 2 1 windows.17 3538 x3.17 $input=windows.17 $shape=3538 #windows.17=(36,8,8,192)f32 #x3.17=(1,6,6,8,8,192)f32 prim::Constant pnnx_1778 0 1 20530 value=1 prim::Constant pnnx_1779 0 1 20531 value=-1 prim::ListConstruct pnnx_1780 4 1 20530 349 589 20531 3543 torch.permute torch.permute_2559 2 1 x3.17 3540 3541 $input=x3.17 $dims=3540 #x3.17=(1,6,6,8,8,192)f32 #3541=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_25 1 1 3541 3542 memory_format=torch.contiguous_format $input=3541 #3541=(1,6,8,6,8,192)f32 #3542=(1,6,8,6,8,192)f32 prim::Constant pnnx_1782 0 1 20532 value=4 prim::Constant pnnx_1783 0 1 20533 value=4 prim::ListConstruct pnnx_1784 2 1 20532 20533 3545 prim::Constant pnnx_1785 0 1 20534 value=1 prim::Constant pnnx_1786 0 1 20535 value=2 prim::ListConstruct pnnx_1787 2 1 20534 20535 3546 Tensor.view Tensor.view_1061 2 1 3542 3543 shifted_x.9 $input=3542 $shape=3543 #3542=(1,6,8,6,8,192)f32 #shifted_x.9=(1,48,48,192)f32 aten::mul pnnx_1789 2 1 H.1 W.1 3548 aten::Int pnnx_1790 1 1 3548 3549 prim::ListConstruct pnnx_1791 3 1 3416 3549 3420 3550 prim::Constant pnnx_1793 0 1 3552 value=None prim::Constant pnnx_1794 0 1 20536 value=1 torch.roll torch.roll_2427 3 1 shifted_x.9 3545 3546 x4.17 $input=shifted_x.9 $shifts=3545 $dims=3546 #shifted_x.9=(1,48,48,192)f32 #x4.17=(1,48,48,192)f32 Tensor.view Tensor.view_1062 2 1 x4.17 3550 x5.9 $input=x4.17 $shape=3550 #x4.17=(1,48,48,192)f32 #x5.9=(1,2304,192)f32 aten::add pnnx_1795 3 1 3395 x5.9 20536 input.43 #3395=(1,2304,192)f32 #x5.9=(1,2304,192)f32 #input.43=(1,2304,192)f32 nn.LayerNorm layers_dfe.1.residual_group.blocks.1.norm2 1 1 input.43 3554 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.43=(1,2304,192)f32 #3554=(1,2304,192)f32 nn.Linear layers_dfe.1.residual_group.blocks.1.mlp.fc1 1 1 3554 3559 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #3554=(1,2304,192)f32 #3559=(1,2304,384)f32 nn.GELU layers_dfe.1.residual_group.blocks.1.mlp.act 1 1 3559 3560 #3559=(1,2304,384)f32 #3560=(1,2304,384)f32 nn.Dropout layers_dfe.1.residual_group.blocks.1.mlp.drop 1 1 3560 3561 #3560=(1,2304,384)f32 #3561=(1,2304,384)f32 nn.Linear layers_dfe.1.residual_group.blocks.1.mlp.fc2 1 1 3561 3562 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #3561=(1,2304,384)f32 #3562=(1,2304,192)f32 nn.Dropout layers_dfe.1.residual_group.blocks.1.mlp.drop 1 1 3562 3563 #3562=(1,2304,192)f32 #3563=(1,2304,192)f32 prim::Constant 
pnnx_1796 0 1 3564 value=None prim::Constant pnnx_1797 0 1 20537 value=1 aten::add pnnx_1798 3 1 input.43 3563 20537 3565 #input.43=(1,2304,192)f32 #3563=(1,2304,192)f32 #3565=(1,2304,192)f32 prim::Constant pnnx_1799 0 1 3566 value=trunc prim::Constant pnnx_1800 0 1 3567 value=8 prim::Constant pnnx_1801 0 1 3568 value=0 prim::Constant pnnx_1802 0 1 3569 value=2 prim::Constant pnnx_1803 0 1 3570 value=1 prim::Constant pnnx_1804 0 1 3571 value=3 prim::Constant pnnx_1805 0 1 3572 value=8 prim::Constant pnnx_1806 0 1 3573 value=4 prim::Constant pnnx_1807 0 1 3574 value=5 prim::Constant pnnx_1808 0 1 3575 value=-1 prim::Constant pnnx_1809 0 1 3576 value=64 aten::size pnnx_1810 2 1 3565 3568 3582 #3565=(1,2304,192)f32 prim::NumToTensor pnnx_1811 1 1 3582 B.23 aten::Int pnnx_1812 1 1 B.23 3584 aten::Int pnnx_1813 1 1 B.23 3585 aten::size pnnx_1814 2 1 3565 3569 3586 #3565=(1,2304,192)f32 prim::NumToTensor pnnx_1815 1 1 3586 C.43 aten::Int pnnx_1816 1 1 C.43 3588 aten::Int pnnx_1817 1 1 C.43 3589 aten::Int pnnx_1818 1 1 C.43 3590 aten::Int pnnx_1819 1 1 C.43 3591 nn.LayerNorm layers_dfe.1.residual_group.blocks.2.norm1 1 1 3565 3592 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #3565=(1,2304,192)f32 #3592=(1,2304,192)f32 prim::ListConstruct pnnx_1820 4 1 3585 346 586 3591 3593 prim::Constant pnnx_1822 0 1 20538 value=0 Tensor.view Tensor.view_1063 2 1 3592 3593 x.19 $input=3592 $shape=3593 #3592=(1,2304,192)f32 #x.19=(1,48,48,192)f32 aten::size pnnx_1823 2 1 x.19 20538 3595 #x.19=(1,48,48,192)f32 prim::NumToTensor pnnx_1824 1 1 3595 B0.19 aten::Int pnnx_1825 1 1 B0.19 3597 aten::size pnnx_1826 2 1 x.19 3570 3598 #x.19=(1,48,48,192)f32 prim::NumToTensor pnnx_1827 1 1 3598 3599 prim::Constant pnnx_1828 0 1 20539 value=2 aten::size pnnx_1829 2 1 x.19 20539 3600 #x.19=(1,48,48,192)f32 prim::NumToTensor pnnx_1830 1 1 3600 3601 aten::size pnnx_1831 2 1 x.19 3571 3602 #x.19=(1,48,48,192)f32 prim::NumToTensor pnnx_1832 1 1 3602 C0.19 aten::Int pnnx_1833 1 1 C0.19 3604 aten::Int pnnx_1834 1 1 C0.19 3605 aten::div pnnx_1835 3 1 3599 3567 3566 3606 aten::Int pnnx_1836 1 1 3606 3607 prim::Constant pnnx_1837 0 1 20540 value=8 prim::Constant pnnx_1838 0 1 20541 value=trunc aten::div pnnx_1839 3 1 3601 20540 20541 3608 aten::Int pnnx_1840 1 1 3608 3609 prim::Constant pnnx_1841 0 1 20542 value=8 prim::ListConstruct pnnx_1842 6 1 3597 3607 3572 3609 20542 3605 3610 prim::Constant pnnx_1844 0 1 20543 value=0 prim::Constant pnnx_1845 0 1 20544 value=1 prim::Constant pnnx_1846 0 1 20545 value=3 prim::Constant pnnx_1847 0 1 20546 value=2 prim::ListConstruct pnnx_1848 6 1 20543 20544 20545 20546 3573 3574 3612 Tensor.view Tensor.view_1064 2 1 x.19 3610 x0.19 $input=x.19 $shape=3610 #x.19=(1,48,48,192)f32 #x0.19=(1,6,8,6,8,192)f32 prim::Constant pnnx_1852 0 1 20548 value=8 prim::Constant pnnx_1853 0 1 20549 value=8 prim::ListConstruct pnnx_1854 4 1 3575 20548 20549 3604 3615 torch.permute torch.permute_2560 2 1 x0.19 3612 3613 $input=x0.19 $dims=3612 #x0.19=(1,6,8,6,8,192)f32 #3613=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_26 1 1 3613 3614 memory_format=torch.contiguous_format $input=3613 #3613=(1,6,6,8,8,192)f32 #3614=(1,6,6,8,8,192)f32 prim::Constant pnnx_1856 0 1 20550 value=-1 prim::ListConstruct pnnx_1857 3 1 20550 3576 3590 3617 prim::Constant pnnx_1859 0 1 3619 value=1.767767e-01 prim::Constant pnnx_1860 0 1 3620 value=trunc prim::Constant pnnx_1861 0 1 3621 value=6 prim::Constant pnnx_1862 0 1 3622 value=0 prim::Constant pnnx_1863 0 1 3623 value=1 
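Note (illustrative, not part of the exported graph): the operators above, Tensor.view_1059 through Tensor.view_1062, torch.permute_2559 and torch.roll_2427, encode the window-reverse and reverse cyclic shift at the tail of a shifted-window block. A minimal PyTorch sketch of that computation, assuming a hypothetical window_reverse helper and using only the shapes from the annotations (36 windows of 8x8 tokens, C=192, H=W=48, shift=4):

    import torch

    def window_reverse(windows, window_size, H, W):
        # windows: (num_windows*B, ws, ws, C) -> (B, H, W, C)
        B = windows.shape[0] // ((H // window_size) * (W // window_size))
        x = windows.view(B, H // window_size, W // window_size,
                         window_size, window_size, -1)
        return x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, H, W, -1)

    attn_windows = torch.randn(36, 8, 8, 192)              # windows.17 in the graph
    shifted_x = window_reverse(attn_windows, 8, 48, 48)     # (1, 48, 48, 192)
    x = torch.roll(shifted_x, shifts=(4, 4), dims=(1, 2))   # undo the cyclic shift
    x = x.view(1, 48 * 48, 192)                             # (1, 2304, 192), fed to the residual add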
prim::Constant pnnx_1864 0 1 3624 value=2 prim::Constant pnnx_1865 0 1 3625 value=3 prim::Constant pnnx_1866 0 1 3626 value=6 prim::Constant pnnx_1867 0 1 3627 value=4 prim::Constant pnnx_1868 0 1 3628 value=-2 prim::Constant pnnx_1869 0 1 3629 value=-1 prim::Constant pnnx_1870 0 1 3630 value=64 pnnx.Attribute layers_dfe.1.residual_group.blocks.2.attn 0 1 relative_position_bias_table.19 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.19=(225,6)f32 pnnx.Attribute layers_dfe.1.residual_group.blocks.2.attn 0 1 relative_position_index.19 @relative_position_index=(64,64)i64 #relative_position_index.19=(64,64)i64 Tensor.view Tensor.view_1065 2 1 3614 3615 x_windows.19 $input=3614 $shape=3615 #3614=(1,6,6,8,8,192)f32 #x_windows.19=(36,8,8,192)f32 Tensor.view Tensor.view_1066 2 1 x_windows.19 3617 x1.19 $input=x_windows.19 $shape=3617 #x_windows.19=(36,8,8,192)f32 #x1.19=(36,64,192)f32 aten::size pnnx_1871 2 1 x1.19 3622 3638 #x1.19=(36,64,192)f32 prim::NumToTensor pnnx_1872 1 1 3638 B_.19 aten::Int pnnx_1873 1 1 B_.19 3640 aten::Int pnnx_1874 1 1 B_.19 3641 aten::size pnnx_1875 2 1 x1.19 3623 3642 #x1.19=(36,64,192)f32 prim::NumToTensor pnnx_1876 1 1 3642 N.19 aten::Int pnnx_1877 1 1 N.19 3644 aten::Int pnnx_1878 1 1 N.19 3645 aten::size pnnx_1879 2 1 x1.19 3624 3646 #x1.19=(36,64,192)f32 prim::NumToTensor pnnx_1880 1 1 3646 C.45 aten::Int pnnx_1881 1 1 C.45 3648 nn.Linear layers_dfe.1.residual_group.blocks.2.attn.qkv 1 1 x1.19 3649 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.19=(36,64,192)f32 #3649=(36,64,576)f32 aten::div pnnx_1882 3 1 C.45 3621 3620 3650 aten::Int pnnx_1883 1 1 3650 3651 prim::ListConstruct pnnx_1884 5 1 3641 3645 3625 3626 3651 3652 prim::Constant pnnx_1886 0 1 20551 value=2 prim::Constant pnnx_1887 0 1 20552 value=0 prim::Constant pnnx_1888 0 1 20553 value=3 prim::Constant pnnx_1889 0 1 20554 value=1 prim::ListConstruct pnnx_1890 5 1 20551 20552 20553 20554 3627 3654 Tensor.reshape Tensor.reshape_450 2 1 3649 3652 3653 $input=3649 $shape=3652 #3649=(36,64,576)f32 #3653=(36,64,3,6,32)f32 prim::Constant pnnx_1892 0 1 20555 value=0 prim::Constant pnnx_1893 0 1 20556 value=0 prim::Constant pnnx_1895 0 1 20557 value=0 prim::Constant pnnx_1896 0 1 20558 value=1 prim::Constant pnnx_1898 0 1 20559 value=0 prim::Constant pnnx_1899 0 1 20560 value=2 torch.permute torch.permute_2561 2 1 3653 3654 qkv0.19 $input=3653 $dims=3654 #3653=(36,64,3,6,32)f32 #qkv0.19=(3,36,6,64,32)f32 Tensor.select Tensor.select_674 3 1 qkv0.19 20555 20556 q.19 $input=qkv0.19 $dim=20555 $index=20556 #qkv0.19=(3,36,6,64,32)f32 #q.19=(36,6,64,32)f32 aten::mul pnnx_1901 2 1 q.19 3619 q0.19 #q.19=(36,6,64,32)f32 #q0.19=(36,6,64,32)f32 Tensor.select Tensor.select_675 3 1 qkv0.19 20557 20558 k.19 $input=qkv0.19 $dim=20557 $index=20558 #qkv0.19=(3,36,6,64,32)f32 #k.19=(36,6,64,32)f32 prim::Constant pnnx_1904 0 1 20561 value=-1 prim::ListConstruct pnnx_1905 1 1 20561 3662 Tensor.view Tensor.view_1067 2 1 relative_position_index.19 3662 3663 $input=relative_position_index.19 $shape=3662 #relative_position_index.19=(64,64)i64 #3663=(4096)i64 prim::ListConstruct pnnx_1907 1 1 3663 3664 #3663=(4096)i64 prim::Constant pnnx_1909 0 1 20562 value=64 prim::Constant pnnx_1910 0 1 20563 value=-1 prim::ListConstruct pnnx_1911 3 1 3630 20562 20563 3666 Tensor.index Tensor.index_334 2 1 relative_position_bias_table.19 3664 3665 $input=relative_position_bias_table.19 $expr=3664 #relative_position_bias_table.19=(225,6)f32 #3665=(4096,6)f32 prim::Constant pnnx_1913 0 1 
20564 value=2 prim::Constant pnnx_1914 0 1 20565 value=0 prim::Constant pnnx_1915 0 1 20566 value=1 prim::ListConstruct pnnx_1916 3 1 20564 20565 20566 3668 Tensor.view Tensor.view_1068 2 1 3665 3666 relative_position_bias.19 $input=3665 $shape=3666 #3665=(4096,6)f32 #relative_position_bias.19=(64,64,6)f32 prim::Constant pnnx_1920 0 1 20568 value=0 torch.permute torch.permute_2562 2 1 relative_position_bias.19 3668 3669 $input=relative_position_bias.19 $dims=3668 #relative_position_bias.19=(64,64,6)f32 #3669=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_27 1 1 3669 relative_position_bias0.19 memory_format=torch.contiguous_format $input=3669 #3669=(6,64,64)f32 #relative_position_bias0.19=(6,64,64)f32 prim::Constant pnnx_1922 0 1 20569 value=1 torch.transpose torch.transpose_2979 3 1 k.19 3628 3629 3660 $input=k.19 $dim0=3628 $dim1=3629 #k.19=(36,6,64,32)f32 #3660=(36,6,32,64)f32 torch.matmul torch.matmul_2220 2 1 q0.19 3660 attn.39 $input=q0.19 $other=3660 #q0.19=(36,6,64,32)f32 #3660=(36,6,32,64)f32 #attn.39=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3234 2 1 relative_position_bias0.19 20568 3671 $input=relative_position_bias0.19 $dim=20568 #relative_position_bias0.19=(6,64,64)f32 #3671=(1,6,64,64)f32 aten::add pnnx_1923 3 1 attn.39 3671 20569 input.45 #attn.39=(36,6,64,64)f32 #3671=(1,6,64,64)f32 #input.45=(36,6,64,64)f32 nn.Softmax layers_dfe.1.residual_group.blocks.2.attn.softmax 1 1 input.45 3673 dim=-1 #input.45=(36,6,64,64)f32 #3673=(36,6,64,64)f32 nn.Dropout layers_dfe.1.residual_group.blocks.2.attn.attn_drop 1 1 3673 3674 #3673=(36,6,64,64)f32 #3674=(36,6,64,64)f32 Tensor.select Tensor.select_676 3 1 qkv0.19 20559 20560 v.19 $input=qkv0.19 $dim=20559 $index=20560 #qkv0.19=(3,36,6,64,32)f32 #v.19=(36,6,64,32)f32 prim::Constant pnnx_1925 0 1 20570 value=1 prim::Constant pnnx_1926 0 1 20571 value=2 torch.matmul torch.matmul_2221 2 1 3674 v.19 3675 $input=3674 $other=v.19 #3674=(36,6,64,64)f32 #v.19=(36,6,64,32)f32 #3675=(36,6,64,32)f32 prim::ListConstruct pnnx_1928 3 1 3640 3644 3648 3677 torch.transpose torch.transpose_2980 3 1 3675 20570 20571 3676 $input=3675 $dim0=20570 $dim1=20571 #3675=(36,6,64,32)f32 #3676=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_451 2 1 3676 3677 input0.21 $input=3676 $shape=3677 #3676=(36,64,6,32)f32 #input0.21=(36,64,192)f32 nn.Linear layers_dfe.1.residual_group.blocks.2.attn.proj 1 1 input0.21 3679 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.21=(36,64,192)f32 #3679=(36,64,192)f32 nn.Dropout layers_dfe.1.residual_group.blocks.2.attn.proj_drop 1 1 3679 3680 #3679=(36,64,192)f32 #3680=(36,64,192)f32 prim::Constant pnnx_1930 0 1 20572 value=-1 prim::Constant pnnx_1931 0 1 20573 value=8 prim::Constant pnnx_1932 0 1 20574 value=8 prim::ListConstruct pnnx_1933 4 1 20572 20573 20574 3589 3681 prim::Constant pnnx_1935 0 1 20575 value=8 prim::Constant pnnx_1936 0 1 20576 value=trunc aten::div pnnx_1937 3 1 H.1 20575 20576 3683 aten::Int pnnx_1938 1 1 3683 3684 prim::Constant pnnx_1939 0 1 20577 value=8 prim::Constant pnnx_1940 0 1 20578 value=trunc aten::div pnnx_1941 3 1 W.1 20577 20578 3685 aten::Int pnnx_1942 1 1 3685 3686 prim::Constant pnnx_1943 0 1 20579 value=1 prim::Constant pnnx_1944 0 1 20580 value=8 prim::Constant pnnx_1945 0 1 20581 value=8 prim::Constant pnnx_1946 0 1 20582 value=-1 prim::ListConstruct pnnx_1947 6 1 20579 3684 3686 20580 20581 20582 3687 prim::Constant pnnx_1949 0 1 20583 value=0 prim::Constant pnnx_1950 0 1 20584 value=1 prim::Constant pnnx_1951 0 1 20585 value=3 prim::Constant 
pnnx_1952 0 1 20586 value=2 prim::Constant pnnx_1953 0 1 20587 value=4 prim::Constant pnnx_1954 0 1 20588 value=5 prim::ListConstruct pnnx_1955 6 1 20583 20584 20585 20586 20587 20588 3689 Tensor.view Tensor.view_1069 2 1 3680 3681 windows.19 $input=3680 $shape=3681 #3680=(36,64,192)f32 #windows.19=(36,8,8,192)f32 Tensor.view Tensor.view_1070 2 1 windows.19 3687 x2.19 $input=windows.19 $shape=3687 #windows.19=(36,8,8,192)f32 #x2.19=(1,6,6,8,8,192)f32 prim::Constant pnnx_1959 0 1 20590 value=1 prim::Constant pnnx_1960 0 1 20591 value=-1 prim::ListConstruct pnnx_1961 4 1 20590 343 583 20591 3692 torch.permute torch.permute_2563 2 1 x2.19 3689 3690 $input=x2.19 $dims=3689 #x2.19=(1,6,6,8,8,192)f32 #3690=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_28 1 1 3690 3691 memory_format=torch.contiguous_format $input=3690 #3690=(1,6,8,6,8,192)f32 #3691=(1,6,8,6,8,192)f32 aten::mul pnnx_1963 2 1 H.1 W.1 3694 aten::Int pnnx_1964 1 1 3694 3695 prim::ListConstruct pnnx_1965 3 1 3584 3695 3588 3696 prim::Constant pnnx_1967 0 1 3698 value=None prim::Constant pnnx_1968 0 1 20592 value=1 Tensor.view Tensor.view_1071 2 1 3691 3692 x3.19 $input=3691 $shape=3692 #3691=(1,6,8,6,8,192)f32 #x3.19=(1,48,48,192)f32 Tensor.view Tensor.view_1072 2 1 x3.19 3696 x4.19 $input=x3.19 $shape=3696 #x3.19=(1,48,48,192)f32 #x4.19=(1,2304,192)f32 aten::add pnnx_1969 3 1 3565 x4.19 20592 input.47 #3565=(1,2304,192)f32 #x4.19=(1,2304,192)f32 #input.47=(1,2304,192)f32 nn.LayerNorm layers_dfe.1.residual_group.blocks.2.norm2 1 1 input.47 3700 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.47=(1,2304,192)f32 #3700=(1,2304,192)f32 nn.Linear layers_dfe.1.residual_group.blocks.2.mlp.fc1 1 1 3700 3705 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #3700=(1,2304,192)f32 #3705=(1,2304,384)f32 nn.GELU layers_dfe.1.residual_group.blocks.2.mlp.act 1 1 3705 3706 #3705=(1,2304,384)f32 #3706=(1,2304,384)f32 nn.Dropout layers_dfe.1.residual_group.blocks.2.mlp.drop 1 1 3706 3707 #3706=(1,2304,384)f32 #3707=(1,2304,384)f32 nn.Linear layers_dfe.1.residual_group.blocks.2.mlp.fc2 1 1 3707 3708 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #3707=(1,2304,384)f32 #3708=(1,2304,192)f32 nn.Dropout layers_dfe.1.residual_group.blocks.2.mlp.drop 1 1 3708 3709 #3708=(1,2304,192)f32 #3709=(1,2304,192)f32 prim::Constant pnnx_1970 0 1 3710 value=None prim::Constant pnnx_1971 0 1 20593 value=1 aten::add pnnx_1972 3 1 input.47 3709 20593 3711 #input.47=(1,2304,192)f32 #3709=(1,2304,192)f32 #3711=(1,2304,192)f32 prim::Constant pnnx_1973 0 1 3712 value=trunc prim::Constant pnnx_1974 0 1 3713 value=8 prim::Constant pnnx_1975 0 1 3714 value=0 prim::Constant pnnx_1976 0 1 3715 value=2 prim::Constant pnnx_1977 0 1 3716 value=-4 prim::Constant pnnx_1978 0 1 3717 value=1 prim::Constant pnnx_1979 0 1 3718 value=3 prim::Constant pnnx_1980 0 1 3719 value=8 prim::Constant pnnx_1981 0 1 3720 value=4 prim::Constant pnnx_1982 0 1 3721 value=5 prim::Constant pnnx_1983 0 1 3722 value=-1 prim::Constant pnnx_1984 0 1 3723 value=64 pnnx.Attribute layers_dfe.1.residual_group.blocks.3 0 1 attn_mask.11 @attn_mask=(36,64,64)f32 #attn_mask.11=(36,64,64)f32 aten::size pnnx_1985 2 1 3711 3714 3730 #3711=(1,2304,192)f32 prim::NumToTensor pnnx_1986 1 1 3730 B.25 aten::Int pnnx_1987 1 1 B.25 3732 aten::Int pnnx_1988 1 1 B.25 3733 aten::size pnnx_1989 2 1 3711 3715 3734 #3711=(1,2304,192)f32 prim::NumToTensor pnnx_1990 1 1 3734 C.47 aten::Int pnnx_1991 1 1 C.47 3736 
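Note (illustrative, not part of the exported graph): the blocks.2 segment above (qkv Linear 192 to 576, Tensor.reshape_450, torch.permute_2561, the q/k/v selects and the 1.767767e-01 scale) is the usual head split of window attention. A short PyTorch sketch under those shape assumptions (36 windows, 64 tokens, 6 heads of 32 channels; 1.767767e-01 is 1/sqrt(32)):

    import torch
    import torch.nn as nn

    B_, N, C, num_heads = 36, 64, 192, 6
    qkv = nn.Linear(C, 3 * C, bias=True)            # corresponds to *.attn.qkv (192 -> 576)
    x = torch.randn(B_, N, C)
    q, k, v = (qkv(x)
               .reshape(B_, N, 3, num_heads, C // num_heads)
               .permute(2, 0, 3, 1, 4))             # (3, 36, 6, 64, 32)
    attn = (q * 0.1767767) @ k.transpose(-2, -1)    # (36, 6, 64, 64)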
aten::Int pnnx_1992 1 1 C.47 3737 aten::Int pnnx_1993 1 1 C.47 3738 aten::Int pnnx_1994 1 1 C.47 3739 nn.LayerNorm layers_dfe.1.residual_group.blocks.3.norm1 1 1 3711 3740 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #3711=(1,2304,192)f32 #3740=(1,2304,192)f32 prim::ListConstruct pnnx_1995 4 1 3733 340 580 3739 3741 prim::Constant pnnx_1997 0 1 20594 value=-4 prim::ListConstruct pnnx_1998 2 1 3716 20594 3743 prim::Constant pnnx_1999 0 1 20595 value=2 prim::ListConstruct pnnx_2000 2 1 3717 20595 3744 Tensor.view Tensor.view_1073 2 1 3740 3741 x.21 $input=3740 $shape=3741 #3740=(1,2304,192)f32 #x.21=(1,48,48,192)f32 prim::Constant pnnx_2002 0 1 20596 value=0 torch.roll torch.roll_2428 3 1 x.21 3743 3744 x0.21 $input=x.21 $shifts=3743 $dims=3744 #x.21=(1,48,48,192)f32 #x0.21=(1,48,48,192)f32 aten::size pnnx_2003 2 1 x0.21 20596 3746 #x0.21=(1,48,48,192)f32 prim::NumToTensor pnnx_2004 1 1 3746 B0.21 aten::Int pnnx_2005 1 1 B0.21 3748 prim::Constant pnnx_2006 0 1 20597 value=1 aten::size pnnx_2007 2 1 x0.21 20597 3749 #x0.21=(1,48,48,192)f32 prim::NumToTensor pnnx_2008 1 1 3749 3750 prim::Constant pnnx_2009 0 1 20598 value=2 aten::size pnnx_2010 2 1 x0.21 20598 3751 #x0.21=(1,48,48,192)f32 prim::NumToTensor pnnx_2011 1 1 3751 3752 aten::size pnnx_2012 2 1 x0.21 3718 3753 #x0.21=(1,48,48,192)f32 prim::NumToTensor pnnx_2013 1 1 3753 C0.21 aten::Int pnnx_2014 1 1 C0.21 3755 aten::Int pnnx_2015 1 1 C0.21 3756 aten::div pnnx_2016 3 1 3750 3713 3712 3757 aten::Int pnnx_2017 1 1 3757 3758 prim::Constant pnnx_2018 0 1 20599 value=8 prim::Constant pnnx_2019 0 1 20600 value=trunc aten::div pnnx_2020 3 1 3752 20599 20600 3759 aten::Int pnnx_2021 1 1 3759 3760 prim::Constant pnnx_2022 0 1 20601 value=8 prim::ListConstruct pnnx_2023 6 1 3748 3758 3719 3760 20601 3756 3761 prim::Constant pnnx_2025 0 1 20602 value=0 prim::Constant pnnx_2026 0 1 20603 value=1 prim::Constant pnnx_2027 0 1 20604 value=3 prim::Constant pnnx_2028 0 1 20605 value=2 prim::ListConstruct pnnx_2029 6 1 20602 20603 20604 20605 3720 3721 3763 Tensor.view Tensor.view_1074 2 1 x0.21 3761 x1.21 $input=x0.21 $shape=3761 #x0.21=(1,48,48,192)f32 #x1.21=(1,6,8,6,8,192)f32 prim::Constant pnnx_2033 0 1 20607 value=8 prim::Constant pnnx_2034 0 1 20608 value=8 prim::ListConstruct pnnx_2035 4 1 3722 20607 20608 3755 3766 torch.permute torch.permute_2564 2 1 x1.21 3763 3764 $input=x1.21 $dims=3763 #x1.21=(1,6,8,6,8,192)f32 #3764=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_29 1 1 3764 3765 memory_format=torch.contiguous_format $input=3764 #3764=(1,6,6,8,8,192)f32 #3765=(1,6,6,8,8,192)f32 prim::Constant pnnx_2037 0 1 20609 value=-1 prim::ListConstruct pnnx_2038 3 1 20609 3723 3738 3768 prim::Constant pnnx_2040 0 1 3770 value=1.767767e-01 prim::Constant pnnx_2041 0 1 3771 value=trunc prim::Constant pnnx_2042 0 1 3772 value=6 prim::Constant pnnx_2043 0 1 3773 value=0 prim::Constant pnnx_2044 0 1 3774 value=1 prim::Constant pnnx_2045 0 1 3775 value=2 prim::Constant pnnx_2046 0 1 3776 value=3 prim::Constant pnnx_2047 0 1 3777 value=6 prim::Constant pnnx_2048 0 1 3778 value=4 prim::Constant pnnx_2049 0 1 3779 value=-2 prim::Constant pnnx_2050 0 1 3780 value=-1 prim::Constant pnnx_2051 0 1 3781 value=64 pnnx.Attribute layers_dfe.1.residual_group.blocks.3.attn 0 1 relative_position_bias_table.21 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.21=(225,6)f32 pnnx.Attribute layers_dfe.1.residual_group.blocks.3.attn 0 1 relative_position_index.21 
@relative_position_index=(64,64)i64 #relative_position_index.21=(64,64)i64 Tensor.view Tensor.view_1075 2 1 3765 3766 x_windows.21 $input=3765 $shape=3766 #3765=(1,6,6,8,8,192)f32 #x_windows.21=(36,8,8,192)f32 Tensor.view Tensor.view_1076 2 1 x_windows.21 3768 x2.21 $input=x_windows.21 $shape=3768 #x_windows.21=(36,8,8,192)f32 #x2.21=(36,64,192)f32 aten::size pnnx_2052 2 1 x2.21 3773 3789 #x2.21=(36,64,192)f32 prim::NumToTensor pnnx_2053 1 1 3789 B_.21 aten::Int pnnx_2054 1 1 B_.21 3791 aten::Int pnnx_2055 1 1 B_.21 3792 aten::size pnnx_2056 2 1 x2.21 3774 3793 #x2.21=(36,64,192)f32 prim::NumToTensor pnnx_2057 1 1 3793 N.21 aten::Int pnnx_2058 1 1 N.21 3795 aten::Int pnnx_2059 1 1 N.21 3796 aten::Int pnnx_2060 1 1 N.21 3797 aten::Int pnnx_2061 1 1 N.21 3798 aten::Int pnnx_2062 1 1 N.21 3799 aten::Int pnnx_2063 1 1 N.21 3800 aten::size pnnx_2064 2 1 x2.21 3775 3801 #x2.21=(36,64,192)f32 prim::NumToTensor pnnx_2065 1 1 3801 C.49 aten::Int pnnx_2066 1 1 C.49 3803 nn.Linear layers_dfe.1.residual_group.blocks.3.attn.qkv 1 1 x2.21 3804 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.21=(36,64,192)f32 #3804=(36,64,576)f32 aten::div pnnx_2067 3 1 C.49 3772 3771 3805 aten::Int pnnx_2068 1 1 3805 3806 prim::ListConstruct pnnx_2069 5 1 3792 3800 3776 3777 3806 3807 prim::Constant pnnx_2071 0 1 20610 value=2 prim::Constant pnnx_2072 0 1 20611 value=0 prim::Constant pnnx_2073 0 1 20612 value=3 prim::Constant pnnx_2074 0 1 20613 value=1 prim::ListConstruct pnnx_2075 5 1 20610 20611 20612 20613 3778 3809 Tensor.reshape Tensor.reshape_452 2 1 3804 3807 3808 $input=3804 $shape=3807 #3804=(36,64,576)f32 #3808=(36,64,3,6,32)f32 prim::Constant pnnx_2077 0 1 20614 value=0 prim::Constant pnnx_2078 0 1 20615 value=0 prim::Constant pnnx_2080 0 1 20616 value=0 prim::Constant pnnx_2081 0 1 20617 value=1 prim::Constant pnnx_2083 0 1 20618 value=0 prim::Constant pnnx_2084 0 1 20619 value=2 torch.permute torch.permute_2565 2 1 3808 3809 qkv0.21 $input=3808 $dims=3809 #3808=(36,64,3,6,32)f32 #qkv0.21=(3,36,6,64,32)f32 Tensor.select Tensor.select_677 3 1 qkv0.21 20614 20615 q.21 $input=qkv0.21 $dim=20614 $index=20615 #qkv0.21=(3,36,6,64,32)f32 #q.21=(36,6,64,32)f32 aten::mul pnnx_2086 2 1 q.21 3770 q0.21 #q.21=(36,6,64,32)f32 #q0.21=(36,6,64,32)f32 Tensor.select Tensor.select_678 3 1 qkv0.21 20616 20617 k.21 $input=qkv0.21 $dim=20616 $index=20617 #qkv0.21=(3,36,6,64,32)f32 #k.21=(36,6,64,32)f32 prim::Constant pnnx_2089 0 1 20620 value=-1 prim::ListConstruct pnnx_2090 1 1 20620 3817 Tensor.view Tensor.view_1077 2 1 relative_position_index.21 3817 3818 $input=relative_position_index.21 $shape=3817 #relative_position_index.21=(64,64)i64 #3818=(4096)i64 prim::ListConstruct pnnx_2092 1 1 3818 3819 #3818=(4096)i64 prim::Constant pnnx_2094 0 1 20621 value=64 prim::Constant pnnx_2095 0 1 20622 value=-1 prim::ListConstruct pnnx_2096 3 1 3781 20621 20622 3821 Tensor.index Tensor.index_335 2 1 relative_position_bias_table.21 3819 3820 $input=relative_position_bias_table.21 $expr=3819 #relative_position_bias_table.21=(225,6)f32 #3820=(4096,6)f32 prim::Constant pnnx_2098 0 1 20623 value=2 prim::Constant pnnx_2099 0 1 20624 value=0 prim::Constant pnnx_2100 0 1 20625 value=1 prim::ListConstruct pnnx_2101 3 1 20623 20624 20625 3823 Tensor.view Tensor.view_1078 2 1 3820 3821 relative_position_bias.21 $input=3820 $shape=3821 #3820=(4096,6)f32 #relative_position_bias.21=(64,64,6)f32 prim::Constant pnnx_2105 0 1 20627 value=0 torch.permute torch.permute_2566 2 1 relative_position_bias.21 3823 3824 
$input=relative_position_bias.21 $dims=3823 #relative_position_bias.21=(64,64,6)f32 #3824=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_30 1 1 3824 relative_position_bias0.21 memory_format=torch.contiguous_format $input=3824 #3824=(6,64,64)f32 #relative_position_bias0.21=(6,64,64)f32 prim::Constant pnnx_2107 0 1 20628 value=1 torch.transpose torch.transpose_2981 3 1 k.21 3779 3780 3815 $input=k.21 $dim0=3779 $dim1=3780 #k.21=(36,6,64,32)f32 #3815=(36,6,32,64)f32 torch.matmul torch.matmul_2222 2 1 q0.21 3815 attn.43 $input=q0.21 $other=3815 #q0.21=(36,6,64,32)f32 #3815=(36,6,32,64)f32 #attn.43=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3235 2 1 relative_position_bias0.21 20627 3826 $input=relative_position_bias0.21 $dim=20627 #relative_position_bias0.21=(6,64,64)f32 #3826=(1,6,64,64)f32 aten::add pnnx_2108 3 1 attn.43 3826 20628 attn0.11 #attn.43=(36,6,64,64)f32 #3826=(1,6,64,64)f32 #attn0.11=(36,6,64,64)f32 prim::Constant pnnx_2109 0 1 20629 value=0 aten::size pnnx_2110 2 1 attn_mask.11 20629 3828 #attn_mask.11=(36,64,64)f32 prim::NumToTensor pnnx_2111 1 1 3828 other.11 aten::Int pnnx_2112 1 1 other.11 3830 prim::Constant pnnx_2113 0 1 20630 value=trunc aten::div pnnx_2114 3 1 B_.21 other.11 20630 3831 aten::Int pnnx_2115 1 1 3831 3832 prim::Constant pnnx_2116 0 1 20631 value=6 prim::ListConstruct pnnx_2117 5 1 3832 3830 20631 3799 3798 3833 prim::Constant pnnx_2119 0 1 20632 value=1 prim::Constant pnnx_2121 0 1 20633 value=0 prim::Constant pnnx_2123 0 1 20634 value=1 Tensor.view Tensor.view_1079 2 1 attn0.11 3833 3834 $input=attn0.11 $shape=3833 #attn0.11=(36,6,64,64)f32 #3834=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3236 2 1 attn_mask.11 20632 3835 $input=attn_mask.11 $dim=20632 #attn_mask.11=(36,64,64)f32 #3835=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3237 2 1 3835 20633 3836 $input=3835 $dim=20633 #3835=(36,1,64,64)f32 #3836=(1,36,1,64,64)f32 aten::add pnnx_2124 3 1 3834 3836 20634 attn1.11 #3834=(1,36,6,64,64)f32 #3836=(1,36,1,64,64)f32 #attn1.11=(1,36,6,64,64)f32 prim::Constant pnnx_2125 0 1 20635 value=-1 prim::Constant pnnx_2126 0 1 20636 value=6 prim::ListConstruct pnnx_2127 4 1 20635 20636 3797 3796 3838 Tensor.view Tensor.view_1080 2 1 attn1.11 3838 input.49 $input=attn1.11 $shape=3838 #attn1.11=(1,36,6,64,64)f32 #input.49=(36,6,64,64)f32 nn.Softmax layers_dfe.1.residual_group.blocks.3.attn.softmax 1 1 input.49 3840 dim=-1 #input.49=(36,6,64,64)f32 #3840=(36,6,64,64)f32 nn.Dropout layers_dfe.1.residual_group.blocks.3.attn.attn_drop 1 1 3840 3841 #3840=(36,6,64,64)f32 #3841=(36,6,64,64)f32 Tensor.select Tensor.select_679 3 1 qkv0.21 20618 20619 v.21 $input=qkv0.21 $dim=20618 $index=20619 #qkv0.21=(3,36,6,64,32)f32 #v.21=(36,6,64,32)f32 prim::Constant pnnx_2130 0 1 20637 value=1 prim::Constant pnnx_2131 0 1 20638 value=2 torch.matmul torch.matmul_2223 2 1 3841 v.21 3842 $input=3841 $other=v.21 #3841=(36,6,64,64)f32 #v.21=(36,6,64,32)f32 #3842=(36,6,64,32)f32 prim::ListConstruct pnnx_2133 3 1 3791 3795 3803 3844 torch.transpose torch.transpose_2982 3 1 3842 20637 20638 3843 $input=3842 $dim0=20637 $dim1=20638 #3842=(36,6,64,32)f32 #3843=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_453 2 1 3843 3844 input0.23 $input=3843 $shape=3844 #3843=(36,64,6,32)f32 #input0.23=(36,64,192)f32 nn.Linear layers_dfe.1.residual_group.blocks.3.attn.proj 1 1 input0.23 3846 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.23=(36,64,192)f32 #3846=(36,64,192)f32 nn.Dropout layers_dfe.1.residual_group.blocks.3.attn.proj_drop 1 1 3846 3847 
#3846=(36,64,192)f32 #3847=(36,64,192)f32 prim::Constant pnnx_2135 0 1 20639 value=-1 prim::Constant pnnx_2136 0 1 20640 value=8 prim::Constant pnnx_2137 0 1 20641 value=8 prim::ListConstruct pnnx_2138 4 1 20639 20640 20641 3737 3848 prim::Constant pnnx_2140 0 1 20642 value=8 prim::Constant pnnx_2141 0 1 20643 value=trunc aten::div pnnx_2142 3 1 H.1 20642 20643 3850 aten::Int pnnx_2143 1 1 3850 3851 prim::Constant pnnx_2144 0 1 20644 value=8 prim::Constant pnnx_2145 0 1 20645 value=trunc aten::div pnnx_2146 3 1 W.1 20644 20645 3852 aten::Int pnnx_2147 1 1 3852 3853 prim::Constant pnnx_2148 0 1 20646 value=1 prim::Constant pnnx_2149 0 1 20647 value=8 prim::Constant pnnx_2150 0 1 20648 value=8 prim::Constant pnnx_2151 0 1 20649 value=-1 prim::ListConstruct pnnx_2152 6 1 20646 3851 3853 20647 20648 20649 3854 prim::Constant pnnx_2154 0 1 20650 value=0 prim::Constant pnnx_2155 0 1 20651 value=1 prim::Constant pnnx_2156 0 1 20652 value=3 prim::Constant pnnx_2157 0 1 20653 value=2 prim::Constant pnnx_2158 0 1 20654 value=4 prim::Constant pnnx_2159 0 1 20655 value=5 prim::ListConstruct pnnx_2160 6 1 20650 20651 20652 20653 20654 20655 3856 Tensor.view Tensor.view_1081 2 1 3847 3848 windows.21 $input=3847 $shape=3848 #3847=(36,64,192)f32 #windows.21=(36,8,8,192)f32 Tensor.view Tensor.view_1082 2 1 windows.21 3854 x3.21 $input=windows.21 $shape=3854 #windows.21=(36,8,8,192)f32 #x3.21=(1,6,6,8,8,192)f32 prim::Constant pnnx_2164 0 1 20657 value=1 prim::Constant pnnx_2165 0 1 20658 value=-1 prim::ListConstruct pnnx_2166 4 1 20657 337 577 20658 3859 torch.permute torch.permute_2567 2 1 x3.21 3856 3857 $input=x3.21 $dims=3856 #x3.21=(1,6,6,8,8,192)f32 #3857=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_31 1 1 3857 3858 memory_format=torch.contiguous_format $input=3857 #3857=(1,6,8,6,8,192)f32 #3858=(1,6,8,6,8,192)f32 prim::Constant pnnx_2168 0 1 20659 value=4 prim::Constant pnnx_2169 0 1 20660 value=4 prim::ListConstruct pnnx_2170 2 1 20659 20660 3861 prim::Constant pnnx_2171 0 1 20661 value=1 prim::Constant pnnx_2172 0 1 20662 value=2 prim::ListConstruct pnnx_2173 2 1 20661 20662 3862 Tensor.view Tensor.view_1083 2 1 3858 3859 shifted_x.11 $input=3858 $shape=3859 #3858=(1,6,8,6,8,192)f32 #shifted_x.11=(1,48,48,192)f32 aten::mul pnnx_2175 2 1 H.1 W.1 3864 aten::Int pnnx_2176 1 1 3864 3865 prim::ListConstruct pnnx_2177 3 1 3732 3865 3736 3866 prim::Constant pnnx_2179 0 1 3868 value=None prim::Constant pnnx_2180 0 1 20663 value=1 torch.roll torch.roll_2429 3 1 shifted_x.11 3861 3862 x4.21 $input=shifted_x.11 $shifts=3861 $dims=3862 #shifted_x.11=(1,48,48,192)f32 #x4.21=(1,48,48,192)f32 Tensor.view Tensor.view_1084 2 1 x4.21 3866 x5.11 $input=x4.21 $shape=3866 #x4.21=(1,48,48,192)f32 #x5.11=(1,2304,192)f32 aten::add pnnx_2181 3 1 3711 x5.11 20663 input.51 #3711=(1,2304,192)f32 #x5.11=(1,2304,192)f32 #input.51=(1,2304,192)f32 nn.LayerNorm layers_dfe.1.residual_group.blocks.3.norm2 1 1 input.51 3870 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.51=(1,2304,192)f32 #3870=(1,2304,192)f32 nn.Linear layers_dfe.1.residual_group.blocks.3.mlp.fc1 1 1 3870 3875 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #3870=(1,2304,192)f32 #3875=(1,2304,384)f32 nn.GELU layers_dfe.1.residual_group.blocks.3.mlp.act 1 1 3875 3876 #3875=(1,2304,384)f32 #3876=(1,2304,384)f32 nn.Dropout layers_dfe.1.residual_group.blocks.3.mlp.drop 1 1 3876 3877 #3876=(1,2304,384)f32 #3877=(1,2304,384)f32 nn.Linear 
layers_dfe.1.residual_group.blocks.3.mlp.fc2 1 1 3877 3878 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #3877=(1,2304,384)f32 #3878=(1,2304,192)f32 nn.Dropout layers_dfe.1.residual_group.blocks.3.mlp.drop 1 1 3878 3879 #3878=(1,2304,192)f32 #3879=(1,2304,192)f32 prim::Constant pnnx_2182 0 1 3880 value=None prim::Constant pnnx_2183 0 1 20664 value=1 aten::add pnnx_2184 3 1 input.51 3879 20664 3881 #input.51=(1,2304,192)f32 #3879=(1,2304,192)f32 #3881=(1,2304,192)f32 prim::Constant pnnx_2185 0 1 3882 value=trunc prim::Constant pnnx_2186 0 1 3883 value=8 prim::Constant pnnx_2187 0 1 3884 value=0 prim::Constant pnnx_2188 0 1 3885 value=2 prim::Constant pnnx_2189 0 1 3886 value=1 prim::Constant pnnx_2190 0 1 3887 value=3 prim::Constant pnnx_2191 0 1 3888 value=8 prim::Constant pnnx_2192 0 1 3889 value=4 prim::Constant pnnx_2193 0 1 3890 value=5 prim::Constant pnnx_2194 0 1 3891 value=-1 prim::Constant pnnx_2195 0 1 3892 value=64 aten::size pnnx_2196 2 1 3881 3884 3898 #3881=(1,2304,192)f32 prim::NumToTensor pnnx_2197 1 1 3898 B.27 aten::Int pnnx_2198 1 1 B.27 3900 aten::Int pnnx_2199 1 1 B.27 3901 aten::size pnnx_2200 2 1 3881 3885 3902 #3881=(1,2304,192)f32 prim::NumToTensor pnnx_2201 1 1 3902 C.51 aten::Int pnnx_2202 1 1 C.51 3904 aten::Int pnnx_2203 1 1 C.51 3905 aten::Int pnnx_2204 1 1 C.51 3906 aten::Int pnnx_2205 1 1 C.51 3907 nn.LayerNorm layers_dfe.1.residual_group.blocks.4.norm1 1 1 3881 3908 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #3881=(1,2304,192)f32 #3908=(1,2304,192)f32 prim::ListConstruct pnnx_2206 4 1 3901 334 574 3907 3909 prim::Constant pnnx_2208 0 1 20665 value=0 Tensor.view Tensor.view_1085 2 1 3908 3909 x.23 $input=3908 $shape=3909 #3908=(1,2304,192)f32 #x.23=(1,48,48,192)f32 aten::size pnnx_2209 2 1 x.23 20665 3911 #x.23=(1,48,48,192)f32 prim::NumToTensor pnnx_2210 1 1 3911 B0.23 aten::Int pnnx_2211 1 1 B0.23 3913 aten::size pnnx_2212 2 1 x.23 3886 3914 #x.23=(1,48,48,192)f32 prim::NumToTensor pnnx_2213 1 1 3914 3915 prim::Constant pnnx_2214 0 1 20666 value=2 aten::size pnnx_2215 2 1 x.23 20666 3916 #x.23=(1,48,48,192)f32 prim::NumToTensor pnnx_2216 1 1 3916 3917 aten::size pnnx_2217 2 1 x.23 3887 3918 #x.23=(1,48,48,192)f32 prim::NumToTensor pnnx_2218 1 1 3918 C0.23 aten::Int pnnx_2219 1 1 C0.23 3920 aten::Int pnnx_2220 1 1 C0.23 3921 aten::div pnnx_2221 3 1 3915 3883 3882 3922 aten::Int pnnx_2222 1 1 3922 3923 prim::Constant pnnx_2223 0 1 20667 value=8 prim::Constant pnnx_2224 0 1 20668 value=trunc aten::div pnnx_2225 3 1 3917 20667 20668 3924 aten::Int pnnx_2226 1 1 3924 3925 prim::Constant pnnx_2227 0 1 20669 value=8 prim::ListConstruct pnnx_2228 6 1 3913 3923 3888 3925 20669 3921 3926 prim::Constant pnnx_2230 0 1 20670 value=0 prim::Constant pnnx_2231 0 1 20671 value=1 prim::Constant pnnx_2232 0 1 20672 value=3 prim::Constant pnnx_2233 0 1 20673 value=2 prim::ListConstruct pnnx_2234 6 1 20670 20671 20672 20673 3889 3890 3928 Tensor.view Tensor.view_1086 2 1 x.23 3926 x0.23 $input=x.23 $shape=3926 #x.23=(1,48,48,192)f32 #x0.23=(1,6,8,6,8,192)f32 prim::Constant pnnx_2238 0 1 20675 value=8 prim::Constant pnnx_2239 0 1 20676 value=8 prim::ListConstruct pnnx_2240 4 1 3891 20675 20676 3920 3931 torch.permute torch.permute_2568 2 1 x0.23 3928 3929 $input=x0.23 $dims=3928 #x0.23=(1,6,8,6,8,192)f32 #3929=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_32 1 1 3929 3930 memory_format=torch.contiguous_format $input=3929 #3929=(1,6,6,8,8,192)f32 #3930=(1,6,6,8,8,192)f32 prim::Constant 
pnnx_2242 0 1 20677 value=-1 prim::ListConstruct pnnx_2243 3 1 20677 3892 3906 3933 prim::Constant pnnx_2245 0 1 3935 value=1.767767e-01 prim::Constant pnnx_2246 0 1 3936 value=trunc prim::Constant pnnx_2247 0 1 3937 value=6 prim::Constant pnnx_2248 0 1 3938 value=0 prim::Constant pnnx_2249 0 1 3939 value=1 prim::Constant pnnx_2250 0 1 3940 value=2 prim::Constant pnnx_2251 0 1 3941 value=3 prim::Constant pnnx_2252 0 1 3942 value=6 prim::Constant pnnx_2253 0 1 3943 value=4 prim::Constant pnnx_2254 0 1 3944 value=-2 prim::Constant pnnx_2255 0 1 3945 value=-1 prim::Constant pnnx_2256 0 1 3946 value=64 pnnx.Attribute layers_dfe.1.residual_group.blocks.4.attn 0 1 relative_position_bias_table.23 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.23=(225,6)f32 pnnx.Attribute layers_dfe.1.residual_group.blocks.4.attn 0 1 relative_position_index.23 @relative_position_index=(64,64)i64 #relative_position_index.23=(64,64)i64 Tensor.view Tensor.view_1087 2 1 3930 3931 x_windows.23 $input=3930 $shape=3931 #3930=(1,6,6,8,8,192)f32 #x_windows.23=(36,8,8,192)f32 Tensor.view Tensor.view_1088 2 1 x_windows.23 3933 x1.23 $input=x_windows.23 $shape=3933 #x_windows.23=(36,8,8,192)f32 #x1.23=(36,64,192)f32 aten::size pnnx_2257 2 1 x1.23 3938 3954 #x1.23=(36,64,192)f32 prim::NumToTensor pnnx_2258 1 1 3954 B_.23 aten::Int pnnx_2259 1 1 B_.23 3956 aten::Int pnnx_2260 1 1 B_.23 3957 aten::size pnnx_2261 2 1 x1.23 3939 3958 #x1.23=(36,64,192)f32 prim::NumToTensor pnnx_2262 1 1 3958 N.23 aten::Int pnnx_2263 1 1 N.23 3960 aten::Int pnnx_2264 1 1 N.23 3961 aten::size pnnx_2265 2 1 x1.23 3940 3962 #x1.23=(36,64,192)f32 prim::NumToTensor pnnx_2266 1 1 3962 C.53 aten::Int pnnx_2267 1 1 C.53 3964 nn.Linear layers_dfe.1.residual_group.blocks.4.attn.qkv 1 1 x1.23 3965 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.23=(36,64,192)f32 #3965=(36,64,576)f32 aten::div pnnx_2268 3 1 C.53 3937 3936 3966 aten::Int pnnx_2269 1 1 3966 3967 prim::ListConstruct pnnx_2270 5 1 3957 3961 3941 3942 3967 3968 prim::Constant pnnx_2272 0 1 20678 value=2 prim::Constant pnnx_2273 0 1 20679 value=0 prim::Constant pnnx_2274 0 1 20680 value=3 prim::Constant pnnx_2275 0 1 20681 value=1 prim::ListConstruct pnnx_2276 5 1 20678 20679 20680 20681 3943 3970 Tensor.reshape Tensor.reshape_454 2 1 3965 3968 3969 $input=3965 $shape=3968 #3965=(36,64,576)f32 #3969=(36,64,3,6,32)f32 prim::Constant pnnx_2278 0 1 20682 value=0 prim::Constant pnnx_2279 0 1 20683 value=0 prim::Constant pnnx_2281 0 1 20684 value=0 prim::Constant pnnx_2282 0 1 20685 value=1 prim::Constant pnnx_2284 0 1 20686 value=0 prim::Constant pnnx_2285 0 1 20687 value=2 torch.permute torch.permute_2569 2 1 3969 3970 qkv0.23 $input=3969 $dims=3970 #3969=(36,64,3,6,32)f32 #qkv0.23=(3,36,6,64,32)f32 Tensor.select Tensor.select_680 3 1 qkv0.23 20682 20683 q.23 $input=qkv0.23 $dim=20682 $index=20683 #qkv0.23=(3,36,6,64,32)f32 #q.23=(36,6,64,32)f32 aten::mul pnnx_2287 2 1 q.23 3935 q0.23 #q.23=(36,6,64,32)f32 #q0.23=(36,6,64,32)f32 Tensor.select Tensor.select_681 3 1 qkv0.23 20684 20685 k.23 $input=qkv0.23 $dim=20684 $index=20685 #qkv0.23=(3,36,6,64,32)f32 #k.23=(36,6,64,32)f32 prim::Constant pnnx_2290 0 1 20688 value=-1 prim::ListConstruct pnnx_2291 1 1 20688 3978 Tensor.view Tensor.view_1089 2 1 relative_position_index.23 3978 3979 $input=relative_position_index.23 $shape=3978 #relative_position_index.23=(64,64)i64 #3979=(4096)i64 prim::ListConstruct pnnx_2293 1 1 3979 3980 #3979=(4096)i64 prim::Constant pnnx_2295 0 1 20689 value=64 
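Note (illustrative, not part of the exported graph): the relative_position_bias_table (225,6) and relative_position_index (64,64) attributes, together with the Tensor.view / Tensor.index / torch.permute chain that follows, encode the relative position bias lookup added to each attention map. A minimal sketch with random stand-in values for the two attributes:

    import torch

    table = torch.randn(225, 6)                     # (2*8-1)**2 entries, one per head
    index = torch.randint(0, 225, (64, 64))         # relative_position_index
    bias = table[index.view(-1)]                    # Tensor.index -> (4096, 6)
    bias = bias.view(64, 64, 6).permute(2, 0, 1).contiguous()   # (6, 64, 64)
    attn = torch.randn(36, 6, 64, 64)
    attn = attn + bias.unsqueeze(0)                 # broadcast over the 36 windows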
prim::Constant pnnx_2296 0 1 20690 value=-1 prim::ListConstruct pnnx_2297 3 1 3946 20689 20690 3982 Tensor.index Tensor.index_336 2 1 relative_position_bias_table.23 3980 3981 $input=relative_position_bias_table.23 $expr=3980 #relative_position_bias_table.23=(225,6)f32 #3981=(4096,6)f32 prim::Constant pnnx_2299 0 1 20691 value=2 prim::Constant pnnx_2300 0 1 20692 value=0 prim::Constant pnnx_2301 0 1 20693 value=1 prim::ListConstruct pnnx_2302 3 1 20691 20692 20693 3984 Tensor.view Tensor.view_1090 2 1 3981 3982 relative_position_bias.23 $input=3981 $shape=3982 #3981=(4096,6)f32 #relative_position_bias.23=(64,64,6)f32 prim::Constant pnnx_2306 0 1 20695 value=0 torch.permute torch.permute_2570 2 1 relative_position_bias.23 3984 3985 $input=relative_position_bias.23 $dims=3984 #relative_position_bias.23=(64,64,6)f32 #3985=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_33 1 1 3985 relative_position_bias0.23 memory_format=torch.contiguous_format $input=3985 #3985=(6,64,64)f32 #relative_position_bias0.23=(6,64,64)f32 prim::Constant pnnx_2308 0 1 20696 value=1 torch.transpose torch.transpose_2983 3 1 k.23 3944 3945 3976 $input=k.23 $dim0=3944 $dim1=3945 #k.23=(36,6,64,32)f32 #3976=(36,6,32,64)f32 torch.matmul torch.matmul_2224 2 1 q0.23 3976 attn.47 $input=q0.23 $other=3976 #q0.23=(36,6,64,32)f32 #3976=(36,6,32,64)f32 #attn.47=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3238 2 1 relative_position_bias0.23 20695 3987 $input=relative_position_bias0.23 $dim=20695 #relative_position_bias0.23=(6,64,64)f32 #3987=(1,6,64,64)f32 aten::add pnnx_2309 3 1 attn.47 3987 20696 input.53 #attn.47=(36,6,64,64)f32 #3987=(1,6,64,64)f32 #input.53=(36,6,64,64)f32 nn.Softmax layers_dfe.1.residual_group.blocks.4.attn.softmax 1 1 input.53 3989 dim=-1 #input.53=(36,6,64,64)f32 #3989=(36,6,64,64)f32 nn.Dropout layers_dfe.1.residual_group.blocks.4.attn.attn_drop 1 1 3989 3990 #3989=(36,6,64,64)f32 #3990=(36,6,64,64)f32 Tensor.select Tensor.select_682 3 1 qkv0.23 20686 20687 v.23 $input=qkv0.23 $dim=20686 $index=20687 #qkv0.23=(3,36,6,64,32)f32 #v.23=(36,6,64,32)f32 prim::Constant pnnx_2311 0 1 20697 value=1 prim::Constant pnnx_2312 0 1 20698 value=2 torch.matmul torch.matmul_2225 2 1 3990 v.23 3991 $input=3990 $other=v.23 #3990=(36,6,64,64)f32 #v.23=(36,6,64,32)f32 #3991=(36,6,64,32)f32 prim::ListConstruct pnnx_2314 3 1 3956 3960 3964 3993 torch.transpose torch.transpose_2984 3 1 3991 20697 20698 3992 $input=3991 $dim0=20697 $dim1=20698 #3991=(36,6,64,32)f32 #3992=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_455 2 1 3992 3993 input0.25 $input=3992 $shape=3993 #3992=(36,64,6,32)f32 #input0.25=(36,64,192)f32 nn.Linear layers_dfe.1.residual_group.blocks.4.attn.proj 1 1 input0.25 3995 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.25=(36,64,192)f32 #3995=(36,64,192)f32 nn.Dropout layers_dfe.1.residual_group.blocks.4.attn.proj_drop 1 1 3995 3996 #3995=(36,64,192)f32 #3996=(36,64,192)f32 prim::Constant pnnx_2316 0 1 20699 value=-1 prim::Constant pnnx_2317 0 1 20700 value=8 prim::Constant pnnx_2318 0 1 20701 value=8 prim::ListConstruct pnnx_2319 4 1 20699 20700 20701 3905 3997 prim::Constant pnnx_2321 0 1 20702 value=8 prim::Constant pnnx_2322 0 1 20703 value=trunc aten::div pnnx_2323 3 1 H.1 20702 20703 3999 aten::Int pnnx_2324 1 1 3999 4000 prim::Constant pnnx_2325 0 1 20704 value=8 prim::Constant pnnx_2326 0 1 20705 value=trunc aten::div pnnx_2327 3 1 W.1 20704 20705 4001 aten::Int pnnx_2328 1 1 4001 4002 prim::Constant pnnx_2329 0 1 20706 value=1 prim::Constant pnnx_2330 0 1 
20707 value=8 prim::Constant pnnx_2331 0 1 20708 value=8 prim::Constant pnnx_2332 0 1 20709 value=-1 prim::ListConstruct pnnx_2333 6 1 20706 4000 4002 20707 20708 20709 4003 prim::Constant pnnx_2335 0 1 20710 value=0 prim::Constant pnnx_2336 0 1 20711 value=1 prim::Constant pnnx_2337 0 1 20712 value=3 prim::Constant pnnx_2338 0 1 20713 value=2 prim::Constant pnnx_2339 0 1 20714 value=4 prim::Constant pnnx_2340 0 1 20715 value=5 prim::ListConstruct pnnx_2341 6 1 20710 20711 20712 20713 20714 20715 4005 Tensor.view Tensor.view_1091 2 1 3996 3997 windows.23 $input=3996 $shape=3997 #3996=(36,64,192)f32 #windows.23=(36,8,8,192)f32 Tensor.view Tensor.view_1092 2 1 windows.23 4003 x2.23 $input=windows.23 $shape=4003 #windows.23=(36,8,8,192)f32 #x2.23=(1,6,6,8,8,192)f32 prim::Constant pnnx_2345 0 1 20717 value=1 prim::Constant pnnx_2346 0 1 20718 value=-1 prim::ListConstruct pnnx_2347 4 1 20717 331 571 20718 4008 torch.permute torch.permute_2571 2 1 x2.23 4005 4006 $input=x2.23 $dims=4005 #x2.23=(1,6,6,8,8,192)f32 #4006=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_34 1 1 4006 4007 memory_format=torch.contiguous_format $input=4006 #4006=(1,6,8,6,8,192)f32 #4007=(1,6,8,6,8,192)f32 aten::mul pnnx_2349 2 1 H.1 W.1 4010 aten::Int pnnx_2350 1 1 4010 4011 prim::ListConstruct pnnx_2351 3 1 3900 4011 3904 4012 prim::Constant pnnx_2353 0 1 4014 value=None prim::Constant pnnx_2354 0 1 20719 value=1 Tensor.view Tensor.view_1093 2 1 4007 4008 x3.23 $input=4007 $shape=4008 #4007=(1,6,8,6,8,192)f32 #x3.23=(1,48,48,192)f32 Tensor.view Tensor.view_1094 2 1 x3.23 4012 x4.23 $input=x3.23 $shape=4012 #x3.23=(1,48,48,192)f32 #x4.23=(1,2304,192)f32 aten::add pnnx_2355 3 1 3881 x4.23 20719 input.55 #3881=(1,2304,192)f32 #x4.23=(1,2304,192)f32 #input.55=(1,2304,192)f32 nn.LayerNorm layers_dfe.1.residual_group.blocks.4.norm2 1 1 input.55 4016 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.55=(1,2304,192)f32 #4016=(1,2304,192)f32 nn.Linear layers_dfe.1.residual_group.blocks.4.mlp.fc1 1 1 4016 4021 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #4016=(1,2304,192)f32 #4021=(1,2304,384)f32 nn.GELU layers_dfe.1.residual_group.blocks.4.mlp.act 1 1 4021 4022 #4021=(1,2304,384)f32 #4022=(1,2304,384)f32 nn.Dropout layers_dfe.1.residual_group.blocks.4.mlp.drop 1 1 4022 4023 #4022=(1,2304,384)f32 #4023=(1,2304,384)f32 nn.Linear layers_dfe.1.residual_group.blocks.4.mlp.fc2 1 1 4023 4024 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #4023=(1,2304,384)f32 #4024=(1,2304,192)f32 nn.Dropout layers_dfe.1.residual_group.blocks.4.mlp.drop 1 1 4024 4025 #4024=(1,2304,192)f32 #4025=(1,2304,192)f32 prim::Constant pnnx_2356 0 1 4026 value=None prim::Constant pnnx_2357 0 1 20720 value=1 aten::add pnnx_2358 3 1 input.55 4025 20720 4027 #input.55=(1,2304,192)f32 #4025=(1,2304,192)f32 #4027=(1,2304,192)f32 prim::Constant pnnx_2359 0 1 4028 value=trunc prim::Constant pnnx_2360 0 1 4029 value=8 prim::Constant pnnx_2361 0 1 4030 value=0 prim::Constant pnnx_2362 0 1 4031 value=2 prim::Constant pnnx_2363 0 1 4032 value=-4 prim::Constant pnnx_2364 0 1 4033 value=1 prim::Constant pnnx_2365 0 1 4034 value=3 prim::Constant pnnx_2366 0 1 4035 value=8 prim::Constant pnnx_2367 0 1 4036 value=4 prim::Constant pnnx_2368 0 1 4037 value=5 prim::Constant pnnx_2369 0 1 4038 value=-1 prim::Constant pnnx_2370 0 1 4039 value=64 pnnx.Attribute layers_dfe.1.residual_group.blocks.5 0 1 attn_mask.13 @attn_mask=(36,64,64)f32 
#attn_mask.13=(36,64,64)f32 aten::size pnnx_2371 2 1 4027 4030 4046 #4027=(1,2304,192)f32 prim::NumToTensor pnnx_2372 1 1 4046 B.29 aten::Int pnnx_2373 1 1 B.29 4048 aten::Int pnnx_2374 1 1 B.29 4049 aten::size pnnx_2375 2 1 4027 4031 4050 #4027=(1,2304,192)f32 prim::NumToTensor pnnx_2376 1 1 4050 C.55 aten::Int pnnx_2377 1 1 C.55 4052 aten::Int pnnx_2378 1 1 C.55 4053 aten::Int pnnx_2379 1 1 C.55 4054 aten::Int pnnx_2380 1 1 C.55 4055 nn.LayerNorm layers_dfe.1.residual_group.blocks.5.norm1 1 1 4027 4056 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #4027=(1,2304,192)f32 #4056=(1,2304,192)f32 prim::ListConstruct pnnx_2381 4 1 4049 328 568 4055 4057 prim::Constant pnnx_2383 0 1 20721 value=-4 prim::ListConstruct pnnx_2384 2 1 4032 20721 4059 prim::Constant pnnx_2385 0 1 20722 value=2 prim::ListConstruct pnnx_2386 2 1 4033 20722 4060 Tensor.view Tensor.view_1095 2 1 4056 4057 x.25 $input=4056 $shape=4057 #4056=(1,2304,192)f32 #x.25=(1,48,48,192)f32 prim::Constant pnnx_2388 0 1 20723 value=0 torch.roll torch.roll_2430 3 1 x.25 4059 4060 x0.25 $input=x.25 $shifts=4059 $dims=4060 #x.25=(1,48,48,192)f32 #x0.25=(1,48,48,192)f32 aten::size pnnx_2389 2 1 x0.25 20723 4062 #x0.25=(1,48,48,192)f32 prim::NumToTensor pnnx_2390 1 1 4062 B0.25 aten::Int pnnx_2391 1 1 B0.25 4064 prim::Constant pnnx_2392 0 1 20724 value=1 aten::size pnnx_2393 2 1 x0.25 20724 4065 #x0.25=(1,48,48,192)f32 prim::NumToTensor pnnx_2394 1 1 4065 4066 prim::Constant pnnx_2395 0 1 20725 value=2 aten::size pnnx_2396 2 1 x0.25 20725 4067 #x0.25=(1,48,48,192)f32 prim::NumToTensor pnnx_2397 1 1 4067 4068 aten::size pnnx_2398 2 1 x0.25 4034 4069 #x0.25=(1,48,48,192)f32 prim::NumToTensor pnnx_2399 1 1 4069 C0.25 aten::Int pnnx_2400 1 1 C0.25 4071 aten::Int pnnx_2401 1 1 C0.25 4072 aten::div pnnx_2402 3 1 4066 4029 4028 4073 aten::Int pnnx_2403 1 1 4073 4074 prim::Constant pnnx_2404 0 1 20726 value=8 prim::Constant pnnx_2405 0 1 20727 value=trunc aten::div pnnx_2406 3 1 4068 20726 20727 4075 aten::Int pnnx_2407 1 1 4075 4076 prim::Constant pnnx_2408 0 1 20728 value=8 prim::ListConstruct pnnx_2409 6 1 4064 4074 4035 4076 20728 4072 4077 prim::Constant pnnx_2411 0 1 20729 value=0 prim::Constant pnnx_2412 0 1 20730 value=1 prim::Constant pnnx_2413 0 1 20731 value=3 prim::Constant pnnx_2414 0 1 20732 value=2 prim::ListConstruct pnnx_2415 6 1 20729 20730 20731 20732 4036 4037 4079 Tensor.view Tensor.view_1096 2 1 x0.25 4077 x1.25 $input=x0.25 $shape=4077 #x0.25=(1,48,48,192)f32 #x1.25=(1,6,8,6,8,192)f32 prim::Constant pnnx_2419 0 1 20734 value=8 prim::Constant pnnx_2420 0 1 20735 value=8 prim::ListConstruct pnnx_2421 4 1 4038 20734 20735 4071 4082 torch.permute torch.permute_2572 2 1 x1.25 4079 4080 $input=x1.25 $dims=4079 #x1.25=(1,6,8,6,8,192)f32 #4080=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_35 1 1 4080 4081 memory_format=torch.contiguous_format $input=4080 #4080=(1,6,6,8,8,192)f32 #4081=(1,6,6,8,8,192)f32 prim::Constant pnnx_2423 0 1 20736 value=-1 prim::ListConstruct pnnx_2424 3 1 20736 4039 4054 4084 prim::Constant pnnx_2426 0 1 4086 value=1.767767e-01 prim::Constant pnnx_2427 0 1 4087 value=trunc prim::Constant pnnx_2428 0 1 4088 value=6 prim::Constant pnnx_2429 0 1 4089 value=0 prim::Constant pnnx_2430 0 1 4090 value=1 prim::Constant pnnx_2431 0 1 4091 value=2 prim::Constant pnnx_2432 0 1 4092 value=3 prim::Constant pnnx_2433 0 1 4093 value=6 prim::Constant pnnx_2434 0 1 4094 value=4 prim::Constant pnnx_2435 0 1 4095 value=-2 prim::Constant pnnx_2436 0 1 4096 value=-1 
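Note (illustrative, not part of the exported graph): blocks.5 is a shifted block, so after the torch.roll by (-4,-4) its attention map is combined with the attn_mask.13 attribute via the view / unsqueeze / add sequence below (torch.unsqueeze_3240, torch.unsqueeze_3241, Tensor.view_1101, Tensor.view_1102). A sketch of that broadcast, assuming random placeholders for attn and the mask:

    import torch

    nW, num_heads, N = 36, 6, 64                    # windows, heads, tokens per window
    attn = torch.randn(nW, num_heads, N, N)         # attn0.13 in the graph
    attn_mask = torch.randn(nW, N, N)               # pnnx.Attribute attn_mask.13
    B = attn.shape[0] // nW                         # 1 here
    attn = attn.view(B, nW, num_heads, N, N) \
             + attn_mask.unsqueeze(1).unsqueeze(0)  # (1, 36, 1, 64, 64) broadcast
    attn = attn.view(-1, num_heads, N, N).softmax(dim=-1)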
prim::Constant pnnx_2437 0 1 4097 value=64 pnnx.Attribute layers_dfe.1.residual_group.blocks.5.attn 0 1 relative_position_bias_table.25 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.25=(225,6)f32 pnnx.Attribute layers_dfe.1.residual_group.blocks.5.attn 0 1 relative_position_index.25 @relative_position_index=(64,64)i64 #relative_position_index.25=(64,64)i64 Tensor.view Tensor.view_1097 2 1 4081 4082 x_windows.25 $input=4081 $shape=4082 #4081=(1,6,6,8,8,192)f32 #x_windows.25=(36,8,8,192)f32 Tensor.view Tensor.view_1098 2 1 x_windows.25 4084 x2.25 $input=x_windows.25 $shape=4084 #x_windows.25=(36,8,8,192)f32 #x2.25=(36,64,192)f32 aten::size pnnx_2438 2 1 x2.25 4089 4105 #x2.25=(36,64,192)f32 prim::NumToTensor pnnx_2439 1 1 4105 B_.25 aten::Int pnnx_2440 1 1 B_.25 4107 aten::Int pnnx_2441 1 1 B_.25 4108 aten::size pnnx_2442 2 1 x2.25 4090 4109 #x2.25=(36,64,192)f32 prim::NumToTensor pnnx_2443 1 1 4109 N.25 aten::Int pnnx_2444 1 1 N.25 4111 aten::Int pnnx_2445 1 1 N.25 4112 aten::Int pnnx_2446 1 1 N.25 4113 aten::Int pnnx_2447 1 1 N.25 4114 aten::Int pnnx_2448 1 1 N.25 4115 aten::Int pnnx_2449 1 1 N.25 4116 aten::size pnnx_2450 2 1 x2.25 4091 4117 #x2.25=(36,64,192)f32 prim::NumToTensor pnnx_2451 1 1 4117 C.57 aten::Int pnnx_2452 1 1 C.57 4119 nn.Linear layers_dfe.1.residual_group.blocks.5.attn.qkv 1 1 x2.25 4120 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.25=(36,64,192)f32 #4120=(36,64,576)f32 aten::div pnnx_2453 3 1 C.57 4088 4087 4121 aten::Int pnnx_2454 1 1 4121 4122 prim::ListConstruct pnnx_2455 5 1 4108 4116 4092 4093 4122 4123 prim::Constant pnnx_2457 0 1 20737 value=2 prim::Constant pnnx_2458 0 1 20738 value=0 prim::Constant pnnx_2459 0 1 20739 value=3 prim::Constant pnnx_2460 0 1 20740 value=1 prim::ListConstruct pnnx_2461 5 1 20737 20738 20739 20740 4094 4125 Tensor.reshape Tensor.reshape_456 2 1 4120 4123 4124 $input=4120 $shape=4123 #4120=(36,64,576)f32 #4124=(36,64,3,6,32)f32 prim::Constant pnnx_2463 0 1 20741 value=0 prim::Constant pnnx_2464 0 1 20742 value=0 prim::Constant pnnx_2466 0 1 20743 value=0 prim::Constant pnnx_2467 0 1 20744 value=1 prim::Constant pnnx_2469 0 1 20745 value=0 prim::Constant pnnx_2470 0 1 20746 value=2 torch.permute torch.permute_2573 2 1 4124 4125 qkv0.25 $input=4124 $dims=4125 #4124=(36,64,3,6,32)f32 #qkv0.25=(3,36,6,64,32)f32 Tensor.select Tensor.select_683 3 1 qkv0.25 20741 20742 q.25 $input=qkv0.25 $dim=20741 $index=20742 #qkv0.25=(3,36,6,64,32)f32 #q.25=(36,6,64,32)f32 aten::mul pnnx_2472 2 1 q.25 4086 q0.25 #q.25=(36,6,64,32)f32 #q0.25=(36,6,64,32)f32 Tensor.select Tensor.select_684 3 1 qkv0.25 20743 20744 k.25 $input=qkv0.25 $dim=20743 $index=20744 #qkv0.25=(3,36,6,64,32)f32 #k.25=(36,6,64,32)f32 prim::Constant pnnx_2475 0 1 20747 value=-1 prim::ListConstruct pnnx_2476 1 1 20747 4133 Tensor.view Tensor.view_1099 2 1 relative_position_index.25 4133 4134 $input=relative_position_index.25 $shape=4133 #relative_position_index.25=(64,64)i64 #4134=(4096)i64 prim::ListConstruct pnnx_2478 1 1 4134 4135 #4134=(4096)i64 prim::Constant pnnx_2480 0 1 20748 value=64 prim::Constant pnnx_2481 0 1 20749 value=-1 prim::ListConstruct pnnx_2482 3 1 4097 20748 20749 4137 Tensor.index Tensor.index_337 2 1 relative_position_bias_table.25 4135 4136 $input=relative_position_bias_table.25 $expr=4135 #relative_position_bias_table.25=(225,6)f32 #4136=(4096,6)f32 prim::Constant pnnx_2484 0 1 20750 value=2 prim::Constant pnnx_2485 0 1 20751 value=0 prim::Constant pnnx_2486 0 1 20752 value=1 prim::ListConstruct 
pnnx_2487 3 1 20750 20751 20752 4139 Tensor.view Tensor.view_1100 2 1 4136 4137 relative_position_bias.25 $input=4136 $shape=4137 #4136=(4096,6)f32 #relative_position_bias.25=(64,64,6)f32 prim::Constant pnnx_2491 0 1 20754 value=0 torch.permute torch.permute_2574 2 1 relative_position_bias.25 4139 4140 $input=relative_position_bias.25 $dims=4139 #relative_position_bias.25=(64,64,6)f32 #4140=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_36 1 1 4140 relative_position_bias0.25 memory_format=torch.contiguous_format $input=4140 #4140=(6,64,64)f32 #relative_position_bias0.25=(6,64,64)f32 prim::Constant pnnx_2493 0 1 20755 value=1 torch.transpose torch.transpose_2985 3 1 k.25 4095 4096 4131 $input=k.25 $dim0=4095 $dim1=4096 #k.25=(36,6,64,32)f32 #4131=(36,6,32,64)f32 torch.matmul torch.matmul_2226 2 1 q0.25 4131 attn.51 $input=q0.25 $other=4131 #q0.25=(36,6,64,32)f32 #4131=(36,6,32,64)f32 #attn.51=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3239 2 1 relative_position_bias0.25 20754 4142 $input=relative_position_bias0.25 $dim=20754 #relative_position_bias0.25=(6,64,64)f32 #4142=(1,6,64,64)f32 aten::add pnnx_2494 3 1 attn.51 4142 20755 attn0.13 #attn.51=(36,6,64,64)f32 #4142=(1,6,64,64)f32 #attn0.13=(36,6,64,64)f32 prim::Constant pnnx_2495 0 1 20756 value=0 aten::size pnnx_2496 2 1 attn_mask.13 20756 4144 #attn_mask.13=(36,64,64)f32 prim::NumToTensor pnnx_2497 1 1 4144 other.13 aten::Int pnnx_2498 1 1 other.13 4146 prim::Constant pnnx_2499 0 1 20757 value=trunc aten::div pnnx_2500 3 1 B_.25 other.13 20757 4147 aten::Int pnnx_2501 1 1 4147 4148 prim::Constant pnnx_2502 0 1 20758 value=6 prim::ListConstruct pnnx_2503 5 1 4148 4146 20758 4115 4114 4149 prim::Constant pnnx_2505 0 1 20759 value=1 prim::Constant pnnx_2507 0 1 20760 value=0 prim::Constant pnnx_2509 0 1 20761 value=1 Tensor.view Tensor.view_1101 2 1 attn0.13 4149 4150 $input=attn0.13 $shape=4149 #attn0.13=(36,6,64,64)f32 #4150=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3240 2 1 attn_mask.13 20759 4151 $input=attn_mask.13 $dim=20759 #attn_mask.13=(36,64,64)f32 #4151=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3241 2 1 4151 20760 4152 $input=4151 $dim=20760 #4151=(36,1,64,64)f32 #4152=(1,36,1,64,64)f32 aten::add pnnx_2510 3 1 4150 4152 20761 attn1.13 #4150=(1,36,6,64,64)f32 #4152=(1,36,1,64,64)f32 #attn1.13=(1,36,6,64,64)f32 prim::Constant pnnx_2511 0 1 20762 value=-1 prim::Constant pnnx_2512 0 1 20763 value=6 prim::ListConstruct pnnx_2513 4 1 20762 20763 4113 4112 4154 Tensor.view Tensor.view_1102 2 1 attn1.13 4154 input.57 $input=attn1.13 $shape=4154 #attn1.13=(1,36,6,64,64)f32 #input.57=(36,6,64,64)f32 nn.Softmax layers_dfe.1.residual_group.blocks.5.attn.softmax 1 1 input.57 4156 dim=-1 #input.57=(36,6,64,64)f32 #4156=(36,6,64,64)f32 nn.Dropout layers_dfe.1.residual_group.blocks.5.attn.attn_drop 1 1 4156 4157 #4156=(36,6,64,64)f32 #4157=(36,6,64,64)f32 Tensor.select Tensor.select_685 3 1 qkv0.25 20745 20746 v.25 $input=qkv0.25 $dim=20745 $index=20746 #qkv0.25=(3,36,6,64,32)f32 #v.25=(36,6,64,32)f32 prim::Constant pnnx_2516 0 1 20764 value=1 prim::Constant pnnx_2517 0 1 20765 value=2 torch.matmul torch.matmul_2227 2 1 4157 v.25 4158 $input=4157 $other=v.25 #4157=(36,6,64,64)f32 #v.25=(36,6,64,32)f32 #4158=(36,6,64,32)f32 prim::ListConstruct pnnx_2519 3 1 4107 4111 4119 4160 torch.transpose torch.transpose_2986 3 1 4158 20764 20765 4159 $input=4158 $dim0=20764 $dim1=20765 #4158=(36,6,64,32)f32 #4159=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_457 2 1 4159 4160 input0.27 $input=4159 $shape=4160 #4159=(36,64,6,32)f32 
#input0.27=(36,64,192)f32 nn.Linear layers_dfe.1.residual_group.blocks.5.attn.proj 1 1 input0.27 4162 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.27=(36,64,192)f32 #4162=(36,64,192)f32 nn.Dropout layers_dfe.1.residual_group.blocks.5.attn.proj_drop 1 1 4162 4163 #4162=(36,64,192)f32 #4163=(36,64,192)f32 prim::Constant pnnx_2521 0 1 20766 value=-1 prim::Constant pnnx_2522 0 1 20767 value=8 prim::Constant pnnx_2523 0 1 20768 value=8 prim::ListConstruct pnnx_2524 4 1 20766 20767 20768 4053 4164 prim::Constant pnnx_2526 0 1 20769 value=8 prim::Constant pnnx_2527 0 1 20770 value=trunc aten::div pnnx_2528 3 1 H.1 20769 20770 4166 aten::Int pnnx_2529 1 1 4166 4167 prim::Constant pnnx_2530 0 1 20771 value=8 prim::Constant pnnx_2531 0 1 20772 value=trunc aten::div pnnx_2532 3 1 W.1 20771 20772 4168 aten::Int pnnx_2533 1 1 4168 4169 prim::Constant pnnx_2534 0 1 20773 value=1 prim::Constant pnnx_2535 0 1 20774 value=8 prim::Constant pnnx_2536 0 1 20775 value=8 prim::Constant pnnx_2537 0 1 20776 value=-1 prim::ListConstruct pnnx_2538 6 1 20773 4167 4169 20774 20775 20776 4170 prim::Constant pnnx_2540 0 1 20777 value=0 prim::Constant pnnx_2541 0 1 20778 value=1 prim::Constant pnnx_2542 0 1 20779 value=3 prim::Constant pnnx_2543 0 1 20780 value=2 prim::Constant pnnx_2544 0 1 20781 value=4 prim::Constant pnnx_2545 0 1 20782 value=5 prim::ListConstruct pnnx_2546 6 1 20777 20778 20779 20780 20781 20782 4172 Tensor.view Tensor.view_1103 2 1 4163 4164 windows.25 $input=4163 $shape=4164 #4163=(36,64,192)f32 #windows.25=(36,8,8,192)f32 Tensor.view Tensor.view_1104 2 1 windows.25 4170 x3.25 $input=windows.25 $shape=4170 #windows.25=(36,8,8,192)f32 #x3.25=(1,6,6,8,8,192)f32 prim::Constant pnnx_2550 0 1 20784 value=1 prim::Constant pnnx_2551 0 1 20785 value=-1 prim::ListConstruct pnnx_2552 4 1 20784 325 565 20785 4175 torch.permute torch.permute_2575 2 1 x3.25 4172 4173 $input=x3.25 $dims=4172 #x3.25=(1,6,6,8,8,192)f32 #4173=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_37 1 1 4173 4174 memory_format=torch.contiguous_format $input=4173 #4173=(1,6,8,6,8,192)f32 #4174=(1,6,8,6,8,192)f32 prim::Constant pnnx_2554 0 1 20786 value=4 prim::Constant pnnx_2555 0 1 20787 value=4 prim::ListConstruct pnnx_2556 2 1 20786 20787 4177 prim::Constant pnnx_2557 0 1 20788 value=1 prim::Constant pnnx_2558 0 1 20789 value=2 prim::ListConstruct pnnx_2559 2 1 20788 20789 4178 Tensor.view Tensor.view_1105 2 1 4174 4175 shifted_x.13 $input=4174 $shape=4175 #4174=(1,6,8,6,8,192)f32 #shifted_x.13=(1,48,48,192)f32 aten::mul pnnx_2561 2 1 H.1 W.1 4180 aten::Int pnnx_2562 1 1 4180 4181 prim::ListConstruct pnnx_2563 3 1 4048 4181 4052 4182 prim::Constant pnnx_2565 0 1 4184 value=None prim::Constant pnnx_2566 0 1 20790 value=1 torch.roll torch.roll_2431 3 1 shifted_x.13 4177 4178 x4.25 $input=shifted_x.13 $shifts=4177 $dims=4178 #shifted_x.13=(1,48,48,192)f32 #x4.25=(1,48,48,192)f32 Tensor.view Tensor.view_1106 2 1 x4.25 4182 x5.13 $input=x4.25 $shape=4182 #x4.25=(1,48,48,192)f32 #x5.13=(1,2304,192)f32 aten::add pnnx_2567 3 1 4027 x5.13 20790 input.59 #4027=(1,2304,192)f32 #x5.13=(1,2304,192)f32 #input.59=(1,2304,192)f32 nn.LayerNorm layers_dfe.1.residual_group.blocks.5.norm2 1 1 input.59 4186 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.59=(1,2304,192)f32 #4186=(1,2304,192)f32 nn.Linear layers_dfe.1.residual_group.blocks.5.mlp.fc1 1 1 4186 4191 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 
#4186=(1,2304,192)f32 #4191=(1,2304,384)f32 nn.GELU layers_dfe.1.residual_group.blocks.5.mlp.act 1 1 4191 4192 #4191=(1,2304,384)f32 #4192=(1,2304,384)f32 nn.Dropout layers_dfe.1.residual_group.blocks.5.mlp.drop 1 1 4192 4193 #4192=(1,2304,384)f32 #4193=(1,2304,384)f32 nn.Linear layers_dfe.1.residual_group.blocks.5.mlp.fc2 1 1 4193 4194 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #4193=(1,2304,384)f32 #4194=(1,2304,192)f32 nn.Dropout layers_dfe.1.residual_group.blocks.5.mlp.drop 1 1 4194 4195 #4194=(1,2304,192)f32 #4195=(1,2304,192)f32 prim::Constant pnnx_2568 0 1 4196 value=None prim::Constant pnnx_2569 0 1 20791 value=1 aten::add pnnx_2570 3 1 input.59 4195 20791 4197 #input.59=(1,2304,192)f32 #4195=(1,2304,192)f32 #4197=(1,2304,192)f32 prim::Constant pnnx_2571 0 1 4198 value=0 prim::Constant pnnx_2572 0 1 4199 value=1 prim::Constant pnnx_2573 0 1 4200 value=2 prim::Constant pnnx_2574 0 1 4201 value=192 aten::size pnnx_2575 2 1 4197 4198 4202 #4197=(1,2304,192)f32 prim::NumToTensor pnnx_2576 1 1 4202 B.31 aten::Int pnnx_2577 1 1 B.31 4204 prim::ListConstruct pnnx_2579 4 1 4204 4201 322 562 4206 torch.transpose torch.transpose_2987 3 1 4197 4199 4200 4205 $input=4197 $dim0=4199 $dim1=4200 #4197=(1,2304,192)f32 #4205=(1,192,2304)f32 Tensor.view Tensor.view_1107 2 1 4205 4206 input.61 $input=4205 $shape=4206 #4205=(1,192,2304)f32 #input.61=(1,192,48,48)f32 nn.Conv2d layers_dfe.1.conv 1 1 input.61 4208 bias=True dilation=(1,1) groups=1 in_channels=192 kernel_size=(3,3) out_channels=192 padding=(1,1) padding_mode=zeros stride=(1,1) @bias=(192)f32 @weight=(192,192,3,3)f32 #input.61=(1,192,48,48)f32 #4208=(1,192,48,48)f32 prim::Constant pnnx_2581 0 1 4209 value=-1 prim::Constant pnnx_2582 0 1 4210 value=2 prim::Constant pnnx_2583 0 1 4211 value=1 prim::Constant pnnx_2585 0 1 20792 value=2 torch.flatten torch.flatten_2185 3 1 4208 4210 4209 4212 $input=4208 $start_dim=4210 $end_dim=4209 #4208=(1,192,48,48)f32 #4212=(1,192,2304)f32 torch.transpose torch.transpose_2988 3 1 4212 4211 20792 4213 $input=4212 $dim0=4211 $dim1=20792 #4212=(1,192,2304)f32 #4213=(1,2304,192)f32 aten::add pnnx_2587 3 1 4213 3232 3233 4214 #4213=(1,2304,192)f32 #3232=(1,2304,192)f32 #4214=(1,2304,192)f32 prim::Constant pnnx_2588 0 1 4215 value=1 prim::Constant pnnx_2589 0 1 4232 value=trunc prim::Constant pnnx_2590 0 1 4233 value=8 prim::Constant pnnx_2591 0 1 4234 value=0 prim::Constant pnnx_2592 0 1 4235 value=2 prim::Constant pnnx_2593 0 1 4236 value=1 prim::Constant pnnx_2594 0 1 4237 value=3 prim::Constant pnnx_2595 0 1 4238 value=8 prim::Constant pnnx_2596 0 1 4239 value=4 prim::Constant pnnx_2597 0 1 4240 value=5 prim::Constant pnnx_2598 0 1 4241 value=-1 prim::Constant pnnx_2599 0 1 4242 value=64 aten::size pnnx_2600 2 1 4214 4234 4248 #4214=(1,2304,192)f32 prim::NumToTensor pnnx_2601 1 1 4248 B.33 aten::Int pnnx_2602 1 1 B.33 4250 aten::Int pnnx_2603 1 1 B.33 4251 aten::size pnnx_2604 2 1 4214 4235 4252 #4214=(1,2304,192)f32 prim::NumToTensor pnnx_2605 1 1 4252 C.59 aten::Int pnnx_2606 1 1 C.59 4254 aten::Int pnnx_2607 1 1 C.59 4255 aten::Int pnnx_2608 1 1 C.59 4256 aten::Int pnnx_2609 1 1 C.59 4257 nn.LayerNorm layers_dfe.2.residual_group.blocks.0.norm1 1 1 4214 4258 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #4214=(1,2304,192)f32 #4258=(1,2304,192)f32 prim::ListConstruct pnnx_2610 4 1 4251 319 559 4257 4259 prim::Constant pnnx_2612 0 1 20793 value=0 Tensor.view Tensor.view_1108 2 1 4258 4259 x.27 $input=4258 $shape=4259 
#4258=(1,2304,192)f32 #x.27=(1,48,48,192)f32 aten::size pnnx_2613 2 1 x.27 20793 4261 #x.27=(1,48,48,192)f32 prim::NumToTensor pnnx_2614 1 1 4261 B0.27 aten::Int pnnx_2615 1 1 B0.27 4263 aten::size pnnx_2616 2 1 x.27 4236 4264 #x.27=(1,48,48,192)f32 prim::NumToTensor pnnx_2617 1 1 4264 4265 prim::Constant pnnx_2618 0 1 20794 value=2 aten::size pnnx_2619 2 1 x.27 20794 4266 #x.27=(1,48,48,192)f32 prim::NumToTensor pnnx_2620 1 1 4266 4267 aten::size pnnx_2621 2 1 x.27 4237 4268 #x.27=(1,48,48,192)f32 prim::NumToTensor pnnx_2622 1 1 4268 C0.27 aten::Int pnnx_2623 1 1 C0.27 4270 aten::Int pnnx_2624 1 1 C0.27 4271 aten::div pnnx_2625 3 1 4265 4233 4232 4272 aten::Int pnnx_2626 1 1 4272 4273 prim::Constant pnnx_2627 0 1 20795 value=8 prim::Constant pnnx_2628 0 1 20796 value=trunc aten::div pnnx_2629 3 1 4267 20795 20796 4274 aten::Int pnnx_2630 1 1 4274 4275 prim::Constant pnnx_2631 0 1 20797 value=8 prim::ListConstruct pnnx_2632 6 1 4263 4273 4238 4275 20797 4271 4276 prim::Constant pnnx_2634 0 1 20798 value=0 prim::Constant pnnx_2635 0 1 20799 value=1 prim::Constant pnnx_2636 0 1 20800 value=3 prim::Constant pnnx_2637 0 1 20801 value=2 prim::ListConstruct pnnx_2638 6 1 20798 20799 20800 20801 4239 4240 4278 Tensor.view Tensor.view_1109 2 1 x.27 4276 x0.27 $input=x.27 $shape=4276 #x.27=(1,48,48,192)f32 #x0.27=(1,6,8,6,8,192)f32 prim::Constant pnnx_2642 0 1 20803 value=8 prim::Constant pnnx_2643 0 1 20804 value=8 prim::ListConstruct pnnx_2644 4 1 4241 20803 20804 4270 4281 torch.permute torch.permute_2576 2 1 x0.27 4278 4279 $input=x0.27 $dims=4278 #x0.27=(1,6,8,6,8,192)f32 #4279=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_38 1 1 4279 4280 memory_format=torch.contiguous_format $input=4279 #4279=(1,6,6,8,8,192)f32 #4280=(1,6,6,8,8,192)f32 prim::Constant pnnx_2646 0 1 20805 value=-1 prim::ListConstruct pnnx_2647 3 1 20805 4242 4256 4283 prim::Constant pnnx_2649 0 1 4285 value=1.767767e-01 prim::Constant pnnx_2650 0 1 4286 value=trunc prim::Constant pnnx_2651 0 1 4287 value=6 prim::Constant pnnx_2652 0 1 4288 value=0 prim::Constant pnnx_2653 0 1 4289 value=1 prim::Constant pnnx_2654 0 1 4290 value=2 prim::Constant pnnx_2655 0 1 4291 value=3 prim::Constant pnnx_2656 0 1 4292 value=6 prim::Constant pnnx_2657 0 1 4293 value=4 prim::Constant pnnx_2658 0 1 4294 value=-2 prim::Constant pnnx_2659 0 1 4295 value=-1 prim::Constant pnnx_2660 0 1 4296 value=64 pnnx.Attribute layers_dfe.2.residual_group.blocks.0.attn 0 1 relative_position_bias_table.27 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.27=(225,6)f32 pnnx.Attribute layers_dfe.2.residual_group.blocks.0.attn 0 1 relative_position_index.27 @relative_position_index=(64,64)i64 #relative_position_index.27=(64,64)i64 Tensor.view Tensor.view_1110 2 1 4280 4281 x_windows.27 $input=4280 $shape=4281 #4280=(1,6,6,8,8,192)f32 #x_windows.27=(36,8,8,192)f32 Tensor.view Tensor.view_1111 2 1 x_windows.27 4283 x1.27 $input=x_windows.27 $shape=4283 #x_windows.27=(36,8,8,192)f32 #x1.27=(36,64,192)f32 aten::size pnnx_2661 2 1 x1.27 4288 4304 #x1.27=(36,64,192)f32 prim::NumToTensor pnnx_2662 1 1 4304 B_.27 aten::Int pnnx_2663 1 1 B_.27 4306 aten::Int pnnx_2664 1 1 B_.27 4307 aten::size pnnx_2665 2 1 x1.27 4289 4308 #x1.27=(36,64,192)f32 prim::NumToTensor pnnx_2666 1 1 4308 N.27 aten::Int pnnx_2667 1 1 N.27 4310 aten::Int pnnx_2668 1 1 N.27 4311 aten::size pnnx_2669 2 1 x1.27 4290 4312 #x1.27=(36,64,192)f32 prim::NumToTensor pnnx_2670 1 1 4312 C.61 aten::Int pnnx_2671 1 1 C.61 4314 nn.Linear layers_dfe.2.residual_group.blocks.0.attn.qkv 
1 1 x1.27 4315 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.27=(36,64,192)f32 #4315=(36,64,576)f32 aten::div pnnx_2672 3 1 C.61 4287 4286 4316 aten::Int pnnx_2673 1 1 4316 4317 prim::ListConstruct pnnx_2674 5 1 4307 4311 4291 4292 4317 4318 prim::Constant pnnx_2676 0 1 20806 value=2 prim::Constant pnnx_2677 0 1 20807 value=0 prim::Constant pnnx_2678 0 1 20808 value=3 prim::Constant pnnx_2679 0 1 20809 value=1 prim::ListConstruct pnnx_2680 5 1 20806 20807 20808 20809 4293 4320 Tensor.reshape Tensor.reshape_458 2 1 4315 4318 4319 $input=4315 $shape=4318 #4315=(36,64,576)f32 #4319=(36,64,3,6,32)f32 prim::Constant pnnx_2682 0 1 20810 value=0 prim::Constant pnnx_2683 0 1 20811 value=0 prim::Constant pnnx_2685 0 1 20812 value=0 prim::Constant pnnx_2686 0 1 20813 value=1 prim::Constant pnnx_2688 0 1 20814 value=0 prim::Constant pnnx_2689 0 1 20815 value=2 torch.permute torch.permute_2577 2 1 4319 4320 qkv0.27 $input=4319 $dims=4320 #4319=(36,64,3,6,32)f32 #qkv0.27=(3,36,6,64,32)f32 Tensor.select Tensor.select_686 3 1 qkv0.27 20810 20811 q.27 $input=qkv0.27 $dim=20810 $index=20811 #qkv0.27=(3,36,6,64,32)f32 #q.27=(36,6,64,32)f32 aten::mul pnnx_2691 2 1 q.27 4285 q0.27 #q.27=(36,6,64,32)f32 #q0.27=(36,6,64,32)f32 Tensor.select Tensor.select_687 3 1 qkv0.27 20812 20813 k.27 $input=qkv0.27 $dim=20812 $index=20813 #qkv0.27=(3,36,6,64,32)f32 #k.27=(36,6,64,32)f32 prim::Constant pnnx_2694 0 1 20816 value=-1 prim::ListConstruct pnnx_2695 1 1 20816 4328 Tensor.view Tensor.view_1112 2 1 relative_position_index.27 4328 4329 $input=relative_position_index.27 $shape=4328 #relative_position_index.27=(64,64)i64 #4329=(4096)i64 prim::ListConstruct pnnx_2697 1 1 4329 4330 #4329=(4096)i64 prim::Constant pnnx_2699 0 1 20817 value=64 prim::Constant pnnx_2700 0 1 20818 value=-1 prim::ListConstruct pnnx_2701 3 1 4296 20817 20818 4332 Tensor.index Tensor.index_338 2 1 relative_position_bias_table.27 4330 4331 $input=relative_position_bias_table.27 $expr=4330 #relative_position_bias_table.27=(225,6)f32 #4331=(4096,6)f32 prim::Constant pnnx_2703 0 1 20819 value=2 prim::Constant pnnx_2704 0 1 20820 value=0 prim::Constant pnnx_2705 0 1 20821 value=1 prim::ListConstruct pnnx_2706 3 1 20819 20820 20821 4334 Tensor.view Tensor.view_1113 2 1 4331 4332 relative_position_bias.27 $input=4331 $shape=4332 #4331=(4096,6)f32 #relative_position_bias.27=(64,64,6)f32 prim::Constant pnnx_2710 0 1 20823 value=0 torch.permute torch.permute_2578 2 1 relative_position_bias.27 4334 4335 $input=relative_position_bias.27 $dims=4334 #relative_position_bias.27=(64,64,6)f32 #4335=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_39 1 1 4335 relative_position_bias0.27 memory_format=torch.contiguous_format $input=4335 #4335=(6,64,64)f32 #relative_position_bias0.27=(6,64,64)f32 prim::Constant pnnx_2712 0 1 20824 value=1 torch.transpose torch.transpose_2989 3 1 k.27 4294 4295 4326 $input=k.27 $dim0=4294 $dim1=4295 #k.27=(36,6,64,32)f32 #4326=(36,6,32,64)f32 torch.matmul torch.matmul_2228 2 1 q0.27 4326 attn.55 $input=q0.27 $other=4326 #q0.27=(36,6,64,32)f32 #4326=(36,6,32,64)f32 #attn.55=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3242 2 1 relative_position_bias0.27 20823 4337 $input=relative_position_bias0.27 $dim=20823 #relative_position_bias0.27=(6,64,64)f32 #4337=(1,6,64,64)f32 aten::add pnnx_2713 3 1 attn.55 4337 20824 input.63 #attn.55=(36,6,64,64)f32 #4337=(1,6,64,64)f32 #input.63=(36,6,64,64)f32 nn.Softmax layers_dfe.2.residual_group.blocks.0.attn.softmax 1 1 input.63 4339 dim=-1 
#input.63=(36,6,64,64)f32 #4339=(36,6,64,64)f32 nn.Dropout layers_dfe.2.residual_group.blocks.0.attn.attn_drop 1 1 4339 4340 #4339=(36,6,64,64)f32 #4340=(36,6,64,64)f32 Tensor.select Tensor.select_688 3 1 qkv0.27 20814 20815 v.27 $input=qkv0.27 $dim=20814 $index=20815 #qkv0.27=(3,36,6,64,32)f32 #v.27=(36,6,64,32)f32 prim::Constant pnnx_2715 0 1 20825 value=1 prim::Constant pnnx_2716 0 1 20826 value=2 torch.matmul torch.matmul_2229 2 1 4340 v.27 4341 $input=4340 $other=v.27 #4340=(36,6,64,64)f32 #v.27=(36,6,64,32)f32 #4341=(36,6,64,32)f32 prim::ListConstruct pnnx_2718 3 1 4306 4310 4314 4343 torch.transpose torch.transpose_2990 3 1 4341 20825 20826 4342 $input=4341 $dim0=20825 $dim1=20826 #4341=(36,6,64,32)f32 #4342=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_459 2 1 4342 4343 input0.29 $input=4342 $shape=4343 #4342=(36,64,6,32)f32 #input0.29=(36,64,192)f32 nn.Linear layers_dfe.2.residual_group.blocks.0.attn.proj 1 1 input0.29 4345 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.29=(36,64,192)f32 #4345=(36,64,192)f32 nn.Dropout layers_dfe.2.residual_group.blocks.0.attn.proj_drop 1 1 4345 4346 #4345=(36,64,192)f32 #4346=(36,64,192)f32 prim::Constant pnnx_2720 0 1 20827 value=-1 prim::Constant pnnx_2721 0 1 20828 value=8 prim::Constant pnnx_2722 0 1 20829 value=8 prim::ListConstruct pnnx_2723 4 1 20827 20828 20829 4255 4347 prim::Constant pnnx_2725 0 1 20830 value=8 prim::Constant pnnx_2726 0 1 20831 value=trunc aten::div pnnx_2727 3 1 H.1 20830 20831 4349 aten::Int pnnx_2728 1 1 4349 4350 prim::Constant pnnx_2729 0 1 20832 value=8 prim::Constant pnnx_2730 0 1 20833 value=trunc aten::div pnnx_2731 3 1 W.1 20832 20833 4351 aten::Int pnnx_2732 1 1 4351 4352 prim::Constant pnnx_2733 0 1 20834 value=1 prim::Constant pnnx_2734 0 1 20835 value=8 prim::Constant pnnx_2735 0 1 20836 value=8 prim::Constant pnnx_2736 0 1 20837 value=-1 prim::ListConstruct pnnx_2737 6 1 20834 4350 4352 20835 20836 20837 4353 prim::Constant pnnx_2739 0 1 20838 value=0 prim::Constant pnnx_2740 0 1 20839 value=1 prim::Constant pnnx_2741 0 1 20840 value=3 prim::Constant pnnx_2742 0 1 20841 value=2 prim::Constant pnnx_2743 0 1 20842 value=4 prim::Constant pnnx_2744 0 1 20843 value=5 prim::ListConstruct pnnx_2745 6 1 20838 20839 20840 20841 20842 20843 4355 Tensor.view Tensor.view_1114 2 1 4346 4347 windows.27 $input=4346 $shape=4347 #4346=(36,64,192)f32 #windows.27=(36,8,8,192)f32 Tensor.view Tensor.view_1115 2 1 windows.27 4353 x2.27 $input=windows.27 $shape=4353 #windows.27=(36,8,8,192)f32 #x2.27=(1,6,6,8,8,192)f32 prim::Constant pnnx_2749 0 1 20845 value=1 prim::Constant pnnx_2750 0 1 20846 value=-1 prim::ListConstruct pnnx_2751 4 1 20845 316 556 20846 4358 torch.permute torch.permute_2579 2 1 x2.27 4355 4356 $input=x2.27 $dims=4355 #x2.27=(1,6,6,8,8,192)f32 #4356=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_40 1 1 4356 4357 memory_format=torch.contiguous_format $input=4356 #4356=(1,6,8,6,8,192)f32 #4357=(1,6,8,6,8,192)f32 aten::mul pnnx_2753 2 1 H.1 W.1 4360 aten::Int pnnx_2754 1 1 4360 4361 prim::ListConstruct pnnx_2755 3 1 4250 4361 4254 4362 prim::Constant pnnx_2757 0 1 4364 value=None prim::Constant pnnx_2758 0 1 20847 value=1 Tensor.view Tensor.view_1116 2 1 4357 4358 x3.27 $input=4357 $shape=4358 #4357=(1,6,8,6,8,192)f32 #x3.27=(1,48,48,192)f32 Tensor.view Tensor.view_1117 2 1 x3.27 4362 x4.27 $input=x3.27 $shape=4362 #x3.27=(1,48,48,192)f32 #x4.27=(1,2304,192)f32 aten::add pnnx_2759 3 1 4214 x4.27 20847 input.65 #4214=(1,2304,192)f32 #x4.27=(1,2304,192)f32 
#input.65=(1,2304,192)f32 nn.LayerNorm layers_dfe.2.residual_group.blocks.0.norm2 1 1 input.65 4366 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.65=(1,2304,192)f32 #4366=(1,2304,192)f32 nn.Linear layers_dfe.2.residual_group.blocks.0.mlp.fc1 1 1 4366 4371 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #4366=(1,2304,192)f32 #4371=(1,2304,384)f32 nn.GELU layers_dfe.2.residual_group.blocks.0.mlp.act 1 1 4371 4372 #4371=(1,2304,384)f32 #4372=(1,2304,384)f32 nn.Dropout layers_dfe.2.residual_group.blocks.0.mlp.drop 1 1 4372 4373 #4372=(1,2304,384)f32 #4373=(1,2304,384)f32 nn.Linear layers_dfe.2.residual_group.blocks.0.mlp.fc2 1 1 4373 4374 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #4373=(1,2304,384)f32 #4374=(1,2304,192)f32 nn.Dropout layers_dfe.2.residual_group.blocks.0.mlp.drop 1 1 4374 4375 #4374=(1,2304,192)f32 #4375=(1,2304,192)f32 prim::Constant pnnx_2760 0 1 4376 value=None prim::Constant pnnx_2761 0 1 20848 value=1 aten::add pnnx_2762 3 1 input.65 4375 20848 4377 #input.65=(1,2304,192)f32 #4375=(1,2304,192)f32 #4377=(1,2304,192)f32 prim::Constant pnnx_2763 0 1 4378 value=trunc prim::Constant pnnx_2764 0 1 4379 value=8 prim::Constant pnnx_2765 0 1 4380 value=0 prim::Constant pnnx_2766 0 1 4381 value=2 prim::Constant pnnx_2767 0 1 4382 value=-4 prim::Constant pnnx_2768 0 1 4383 value=1 prim::Constant pnnx_2769 0 1 4384 value=3 prim::Constant pnnx_2770 0 1 4385 value=8 prim::Constant pnnx_2771 0 1 4386 value=4 prim::Constant pnnx_2772 0 1 4387 value=5 prim::Constant pnnx_2773 0 1 4388 value=-1 prim::Constant pnnx_2774 0 1 4389 value=64 pnnx.Attribute layers_dfe.2.residual_group.blocks.1 0 1 attn_mask.15 @attn_mask=(36,64,64)f32 #attn_mask.15=(36,64,64)f32 aten::size pnnx_2775 2 1 4377 4380 4396 #4377=(1,2304,192)f32 prim::NumToTensor pnnx_2776 1 1 4396 B.35 aten::Int pnnx_2777 1 1 B.35 4398 aten::Int pnnx_2778 1 1 B.35 4399 aten::size pnnx_2779 2 1 4377 4381 4400 #4377=(1,2304,192)f32 prim::NumToTensor pnnx_2780 1 1 4400 C.63 aten::Int pnnx_2781 1 1 C.63 4402 aten::Int pnnx_2782 1 1 C.63 4403 aten::Int pnnx_2783 1 1 C.63 4404 aten::Int pnnx_2784 1 1 C.63 4405 nn.LayerNorm layers_dfe.2.residual_group.blocks.1.norm1 1 1 4377 4406 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #4377=(1,2304,192)f32 #4406=(1,2304,192)f32 prim::ListConstruct pnnx_2785 4 1 4399 313 553 4405 4407 prim::Constant pnnx_2787 0 1 20849 value=-4 prim::ListConstruct pnnx_2788 2 1 4382 20849 4409 prim::Constant pnnx_2789 0 1 20850 value=2 prim::ListConstruct pnnx_2790 2 1 4383 20850 4410 Tensor.view Tensor.view_1118 2 1 4406 4407 x.29 $input=4406 $shape=4407 #4406=(1,2304,192)f32 #x.29=(1,48,48,192)f32 prim::Constant pnnx_2792 0 1 20851 value=0 torch.roll torch.roll_2432 3 1 x.29 4409 4410 x0.29 $input=x.29 $shifts=4409 $dims=4410 #x.29=(1,48,48,192)f32 #x0.29=(1,48,48,192)f32 aten::size pnnx_2793 2 1 x0.29 20851 4412 #x0.29=(1,48,48,192)f32 prim::NumToTensor pnnx_2794 1 1 4412 B0.29 aten::Int pnnx_2795 1 1 B0.29 4414 prim::Constant pnnx_2796 0 1 20852 value=1 aten::size pnnx_2797 2 1 x0.29 20852 4415 #x0.29=(1,48,48,192)f32 prim::NumToTensor pnnx_2798 1 1 4415 4416 prim::Constant pnnx_2799 0 1 20853 value=2 aten::size pnnx_2800 2 1 x0.29 20853 4417 #x0.29=(1,48,48,192)f32 prim::NumToTensor pnnx_2801 1 1 4417 4418 aten::size pnnx_2802 2 1 x0.29 4384 4419 #x0.29=(1,48,48,192)f32 prim::NumToTensor pnnx_2803 1 1 4419 C0.29 aten::Int pnnx_2804 1 1 C0.29 
4421 aten::Int pnnx_2805 1 1 C0.29 4422 aten::div pnnx_2806 3 1 4416 4379 4378 4423 aten::Int pnnx_2807 1 1 4423 4424 prim::Constant pnnx_2808 0 1 20854 value=8 prim::Constant pnnx_2809 0 1 20855 value=trunc aten::div pnnx_2810 3 1 4418 20854 20855 4425 aten::Int pnnx_2811 1 1 4425 4426 prim::Constant pnnx_2812 0 1 20856 value=8 prim::ListConstruct pnnx_2813 6 1 4414 4424 4385 4426 20856 4422 4427 prim::Constant pnnx_2815 0 1 20857 value=0 prim::Constant pnnx_2816 0 1 20858 value=1 prim::Constant pnnx_2817 0 1 20859 value=3 prim::Constant pnnx_2818 0 1 20860 value=2 prim::ListConstruct pnnx_2819 6 1 20857 20858 20859 20860 4386 4387 4429 Tensor.view Tensor.view_1119 2 1 x0.29 4427 x1.29 $input=x0.29 $shape=4427 #x0.29=(1,48,48,192)f32 #x1.29=(1,6,8,6,8,192)f32 prim::Constant pnnx_2823 0 1 20862 value=8 prim::Constant pnnx_2824 0 1 20863 value=8 prim::ListConstruct pnnx_2825 4 1 4388 20862 20863 4421 4432 torch.permute torch.permute_2580 2 1 x1.29 4429 4430 $input=x1.29 $dims=4429 #x1.29=(1,6,8,6,8,192)f32 #4430=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_41 1 1 4430 4431 memory_format=torch.contiguous_format $input=4430 #4430=(1,6,6,8,8,192)f32 #4431=(1,6,6,8,8,192)f32 prim::Constant pnnx_2827 0 1 20864 value=-1 prim::ListConstruct pnnx_2828 3 1 20864 4389 4404 4434 prim::Constant pnnx_2830 0 1 4436 value=1.767767e-01 prim::Constant pnnx_2831 0 1 4437 value=trunc prim::Constant pnnx_2832 0 1 4438 value=6 prim::Constant pnnx_2833 0 1 4439 value=0 prim::Constant pnnx_2834 0 1 4440 value=1 prim::Constant pnnx_2835 0 1 4441 value=2 prim::Constant pnnx_2836 0 1 4442 value=3 prim::Constant pnnx_2837 0 1 4443 value=6 prim::Constant pnnx_2838 0 1 4444 value=4 prim::Constant pnnx_2839 0 1 4445 value=-2 prim::Constant pnnx_2840 0 1 4446 value=-1 prim::Constant pnnx_2841 0 1 4447 value=64 pnnx.Attribute layers_dfe.2.residual_group.blocks.1.attn 0 1 relative_position_bias_table.29 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.29=(225,6)f32 pnnx.Attribute layers_dfe.2.residual_group.blocks.1.attn 0 1 relative_position_index.29 @relative_position_index=(64,64)i64 #relative_position_index.29=(64,64)i64 Tensor.view Tensor.view_1120 2 1 4431 4432 x_windows.29 $input=4431 $shape=4432 #4431=(1,6,6,8,8,192)f32 #x_windows.29=(36,8,8,192)f32 Tensor.view Tensor.view_1121 2 1 x_windows.29 4434 x2.29 $input=x_windows.29 $shape=4434 #x_windows.29=(36,8,8,192)f32 #x2.29=(36,64,192)f32 aten::size pnnx_2842 2 1 x2.29 4439 4455 #x2.29=(36,64,192)f32 prim::NumToTensor pnnx_2843 1 1 4455 B_.29 aten::Int pnnx_2844 1 1 B_.29 4457 aten::Int pnnx_2845 1 1 B_.29 4458 aten::size pnnx_2846 2 1 x2.29 4440 4459 #x2.29=(36,64,192)f32 prim::NumToTensor pnnx_2847 1 1 4459 N.29 aten::Int pnnx_2848 1 1 N.29 4461 aten::Int pnnx_2849 1 1 N.29 4462 aten::Int pnnx_2850 1 1 N.29 4463 aten::Int pnnx_2851 1 1 N.29 4464 aten::Int pnnx_2852 1 1 N.29 4465 aten::Int pnnx_2853 1 1 N.29 4466 aten::size pnnx_2854 2 1 x2.29 4441 4467 #x2.29=(36,64,192)f32 prim::NumToTensor pnnx_2855 1 1 4467 C.65 aten::Int pnnx_2856 1 1 C.65 4469 nn.Linear layers_dfe.2.residual_group.blocks.1.attn.qkv 1 1 x2.29 4470 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.29=(36,64,192)f32 #4470=(36,64,576)f32 aten::div pnnx_2857 3 1 C.65 4438 4437 4471 aten::Int pnnx_2858 1 1 4471 4472 prim::ListConstruct pnnx_2859 5 1 4458 4466 4442 4443 4472 4473 prim::Constant pnnx_2861 0 1 20865 value=2 prim::Constant pnnx_2862 0 1 20866 value=0 prim::Constant pnnx_2863 0 1 20867 value=3 prim::Constant pnnx_2864 0 1 
20868 value=1 prim::ListConstruct pnnx_2865 5 1 20865 20866 20867 20868 4444 4475 Tensor.reshape Tensor.reshape_460 2 1 4470 4473 4474 $input=4470 $shape=4473 #4470=(36,64,576)f32 #4474=(36,64,3,6,32)f32 prim::Constant pnnx_2867 0 1 20869 value=0 prim::Constant pnnx_2868 0 1 20870 value=0 prim::Constant pnnx_2870 0 1 20871 value=0 prim::Constant pnnx_2871 0 1 20872 value=1 prim::Constant pnnx_2873 0 1 20873 value=0 prim::Constant pnnx_2874 0 1 20874 value=2 torch.permute torch.permute_2581 2 1 4474 4475 qkv0.29 $input=4474 $dims=4475 #4474=(36,64,3,6,32)f32 #qkv0.29=(3,36,6,64,32)f32 Tensor.select Tensor.select_689 3 1 qkv0.29 20869 20870 q.29 $input=qkv0.29 $dim=20869 $index=20870 #qkv0.29=(3,36,6,64,32)f32 #q.29=(36,6,64,32)f32 aten::mul pnnx_2876 2 1 q.29 4436 q0.29 #q.29=(36,6,64,32)f32 #q0.29=(36,6,64,32)f32 Tensor.select Tensor.select_690 3 1 qkv0.29 20871 20872 k.29 $input=qkv0.29 $dim=20871 $index=20872 #qkv0.29=(3,36,6,64,32)f32 #k.29=(36,6,64,32)f32 prim::Constant pnnx_2879 0 1 20875 value=-1 prim::ListConstruct pnnx_2880 1 1 20875 4483 Tensor.view Tensor.view_1122 2 1 relative_position_index.29 4483 4484 $input=relative_position_index.29 $shape=4483 #relative_position_index.29=(64,64)i64 #4484=(4096)i64 prim::ListConstruct pnnx_2882 1 1 4484 4485 #4484=(4096)i64 prim::Constant pnnx_2884 0 1 20876 value=64 prim::Constant pnnx_2885 0 1 20877 value=-1 prim::ListConstruct pnnx_2886 3 1 4447 20876 20877 4487 Tensor.index Tensor.index_339 2 1 relative_position_bias_table.29 4485 4486 $input=relative_position_bias_table.29 $expr=4485 #relative_position_bias_table.29=(225,6)f32 #4486=(4096,6)f32 prim::Constant pnnx_2888 0 1 20878 value=2 prim::Constant pnnx_2889 0 1 20879 value=0 prim::Constant pnnx_2890 0 1 20880 value=1 prim::ListConstruct pnnx_2891 3 1 20878 20879 20880 4489 Tensor.view Tensor.view_1123 2 1 4486 4487 relative_position_bias.29 $input=4486 $shape=4487 #4486=(4096,6)f32 #relative_position_bias.29=(64,64,6)f32 prim::Constant pnnx_2895 0 1 20882 value=0 torch.permute torch.permute_2582 2 1 relative_position_bias.29 4489 4490 $input=relative_position_bias.29 $dims=4489 #relative_position_bias.29=(64,64,6)f32 #4490=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_42 1 1 4490 relative_position_bias0.29 memory_format=torch.contiguous_format $input=4490 #4490=(6,64,64)f32 #relative_position_bias0.29=(6,64,64)f32 prim::Constant pnnx_2897 0 1 20883 value=1 torch.transpose torch.transpose_2991 3 1 k.29 4445 4446 4481 $input=k.29 $dim0=4445 $dim1=4446 #k.29=(36,6,64,32)f32 #4481=(36,6,32,64)f32 torch.matmul torch.matmul_2230 2 1 q0.29 4481 attn.59 $input=q0.29 $other=4481 #q0.29=(36,6,64,32)f32 #4481=(36,6,32,64)f32 #attn.59=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3243 2 1 relative_position_bias0.29 20882 4492 $input=relative_position_bias0.29 $dim=20882 #relative_position_bias0.29=(6,64,64)f32 #4492=(1,6,64,64)f32 aten::add pnnx_2898 3 1 attn.59 4492 20883 attn0.15 #attn.59=(36,6,64,64)f32 #4492=(1,6,64,64)f32 #attn0.15=(36,6,64,64)f32 prim::Constant pnnx_2899 0 1 20884 value=0 aten::size pnnx_2900 2 1 attn_mask.15 20884 4494 #attn_mask.15=(36,64,64)f32 prim::NumToTensor pnnx_2901 1 1 4494 other.15 aten::Int pnnx_2902 1 1 other.15 4496 prim::Constant pnnx_2903 0 1 20885 value=trunc aten::div pnnx_2904 3 1 B_.29 other.15 20885 4497 aten::Int pnnx_2905 1 1 4497 4498 prim::Constant pnnx_2906 0 1 20886 value=6 prim::ListConstruct pnnx_2907 5 1 4498 4496 20886 4465 4464 4499 prim::Constant pnnx_2909 0 1 20887 value=1 prim::Constant pnnx_2911 0 1 20888 value=0 prim::Constant 
pnnx_2913 0 1 20889 value=1 Tensor.view Tensor.view_1124 2 1 attn0.15 4499 4500 $input=attn0.15 $shape=4499 #attn0.15=(36,6,64,64)f32 #4500=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3244 2 1 attn_mask.15 20887 4501 $input=attn_mask.15 $dim=20887 #attn_mask.15=(36,64,64)f32 #4501=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3245 2 1 4501 20888 4502 $input=4501 $dim=20888 #4501=(36,1,64,64)f32 #4502=(1,36,1,64,64)f32 aten::add pnnx_2914 3 1 4500 4502 20889 attn1.15 #4500=(1,36,6,64,64)f32 #4502=(1,36,1,64,64)f32 #attn1.15=(1,36,6,64,64)f32 prim::Constant pnnx_2915 0 1 20890 value=-1 prim::Constant pnnx_2916 0 1 20891 value=6 prim::ListConstruct pnnx_2917 4 1 20890 20891 4463 4462 4504 Tensor.view Tensor.view_1125 2 1 attn1.15 4504 input.67 $input=attn1.15 $shape=4504 #attn1.15=(1,36,6,64,64)f32 #input.67=(36,6,64,64)f32 nn.Softmax layers_dfe.2.residual_group.blocks.1.attn.softmax 1 1 input.67 4506 dim=-1 #input.67=(36,6,64,64)f32 #4506=(36,6,64,64)f32 nn.Dropout layers_dfe.2.residual_group.blocks.1.attn.attn_drop 1 1 4506 4507 #4506=(36,6,64,64)f32 #4507=(36,6,64,64)f32 Tensor.select Tensor.select_691 3 1 qkv0.29 20873 20874 v.29 $input=qkv0.29 $dim=20873 $index=20874 #qkv0.29=(3,36,6,64,32)f32 #v.29=(36,6,64,32)f32 prim::Constant pnnx_2920 0 1 20892 value=1 prim::Constant pnnx_2921 0 1 20893 value=2 torch.matmul torch.matmul_2231 2 1 4507 v.29 4508 $input=4507 $other=v.29 #4507=(36,6,64,64)f32 #v.29=(36,6,64,32)f32 #4508=(36,6,64,32)f32 prim::ListConstruct pnnx_2923 3 1 4457 4461 4469 4510 torch.transpose torch.transpose_2992 3 1 4508 20892 20893 4509 $input=4508 $dim0=20892 $dim1=20893 #4508=(36,6,64,32)f32 #4509=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_461 2 1 4509 4510 input0.31 $input=4509 $shape=4510 #4509=(36,64,6,32)f32 #input0.31=(36,64,192)f32 nn.Linear layers_dfe.2.residual_group.blocks.1.attn.proj 1 1 input0.31 4512 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.31=(36,64,192)f32 #4512=(36,64,192)f32 nn.Dropout layers_dfe.2.residual_group.blocks.1.attn.proj_drop 1 1 4512 4513 #4512=(36,64,192)f32 #4513=(36,64,192)f32 prim::Constant pnnx_2925 0 1 20894 value=-1 prim::Constant pnnx_2926 0 1 20895 value=8 prim::Constant pnnx_2927 0 1 20896 value=8 prim::ListConstruct pnnx_2928 4 1 20894 20895 20896 4403 4514 prim::Constant pnnx_2930 0 1 20897 value=8 prim::Constant pnnx_2931 0 1 20898 value=trunc aten::div pnnx_2932 3 1 H.1 20897 20898 4516 aten::Int pnnx_2933 1 1 4516 4517 prim::Constant pnnx_2934 0 1 20899 value=8 prim::Constant pnnx_2935 0 1 20900 value=trunc aten::div pnnx_2936 3 1 W.1 20899 20900 4518 aten::Int pnnx_2937 1 1 4518 4519 prim::Constant pnnx_2938 0 1 20901 value=1 prim::Constant pnnx_2939 0 1 20902 value=8 prim::Constant pnnx_2940 0 1 20903 value=8 prim::Constant pnnx_2941 0 1 20904 value=-1 prim::ListConstruct pnnx_2942 6 1 20901 4517 4519 20902 20903 20904 4520 prim::Constant pnnx_2944 0 1 20905 value=0 prim::Constant pnnx_2945 0 1 20906 value=1 prim::Constant pnnx_2946 0 1 20907 value=3 prim::Constant pnnx_2947 0 1 20908 value=2 prim::Constant pnnx_2948 0 1 20909 value=4 prim::Constant pnnx_2949 0 1 20910 value=5 prim::ListConstruct pnnx_2950 6 1 20905 20906 20907 20908 20909 20910 4522 Tensor.view Tensor.view_1126 2 1 4513 4514 windows.29 $input=4513 $shape=4514 #4513=(36,64,192)f32 #windows.29=(36,8,8,192)f32 Tensor.view Tensor.view_1127 2 1 windows.29 4520 x3.29 $input=windows.29 $shape=4520 #windows.29=(36,8,8,192)f32 #x3.29=(1,6,6,8,8,192)f32 prim::Constant pnnx_2954 0 1 20912 value=1 prim::Constant 
pnnx_2955 0 1 20913 value=-1 prim::ListConstruct pnnx_2956 4 1 20912 310 550 20913 4525 torch.permute torch.permute_2583 2 1 x3.29 4522 4523 $input=x3.29 $dims=4522 #x3.29=(1,6,6,8,8,192)f32 #4523=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_43 1 1 4523 4524 memory_format=torch.contiguous_format $input=4523 #4523=(1,6,8,6,8,192)f32 #4524=(1,6,8,6,8,192)f32 prim::Constant pnnx_2958 0 1 20914 value=4 prim::Constant pnnx_2959 0 1 20915 value=4 prim::ListConstruct pnnx_2960 2 1 20914 20915 4527 prim::Constant pnnx_2961 0 1 20916 value=1 prim::Constant pnnx_2962 0 1 20917 value=2 prim::ListConstruct pnnx_2963 2 1 20916 20917 4528 Tensor.view Tensor.view_1128 2 1 4524 4525 shifted_x.15 $input=4524 $shape=4525 #4524=(1,6,8,6,8,192)f32 #shifted_x.15=(1,48,48,192)f32 aten::mul pnnx_2965 2 1 H.1 W.1 4530 aten::Int pnnx_2966 1 1 4530 4531 prim::ListConstruct pnnx_2967 3 1 4398 4531 4402 4532 prim::Constant pnnx_2969 0 1 4534 value=None prim::Constant pnnx_2970 0 1 20918 value=1 torch.roll torch.roll_2433 3 1 shifted_x.15 4527 4528 x4.29 $input=shifted_x.15 $shifts=4527 $dims=4528 #shifted_x.15=(1,48,48,192)f32 #x4.29=(1,48,48,192)f32 Tensor.view Tensor.view_1129 2 1 x4.29 4532 x5.15 $input=x4.29 $shape=4532 #x4.29=(1,48,48,192)f32 #x5.15=(1,2304,192)f32 aten::add pnnx_2971 3 1 4377 x5.15 20918 input.69 #4377=(1,2304,192)f32 #x5.15=(1,2304,192)f32 #input.69=(1,2304,192)f32 nn.LayerNorm layers_dfe.2.residual_group.blocks.1.norm2 1 1 input.69 4536 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.69=(1,2304,192)f32 #4536=(1,2304,192)f32 nn.Linear layers_dfe.2.residual_group.blocks.1.mlp.fc1 1 1 4536 4541 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #4536=(1,2304,192)f32 #4541=(1,2304,384)f32 nn.GELU layers_dfe.2.residual_group.blocks.1.mlp.act 1 1 4541 4542 #4541=(1,2304,384)f32 #4542=(1,2304,384)f32 nn.Dropout layers_dfe.2.residual_group.blocks.1.mlp.drop 1 1 4542 4543 #4542=(1,2304,384)f32 #4543=(1,2304,384)f32 nn.Linear layers_dfe.2.residual_group.blocks.1.mlp.fc2 1 1 4543 4544 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #4543=(1,2304,384)f32 #4544=(1,2304,192)f32 nn.Dropout layers_dfe.2.residual_group.blocks.1.mlp.drop 1 1 4544 4545 #4544=(1,2304,192)f32 #4545=(1,2304,192)f32 prim::Constant pnnx_2972 0 1 4546 value=None prim::Constant pnnx_2973 0 1 20919 value=1 aten::add pnnx_2974 3 1 input.69 4545 20919 4547 #input.69=(1,2304,192)f32 #4545=(1,2304,192)f32 #4547=(1,2304,192)f32 prim::Constant pnnx_2975 0 1 4548 value=trunc prim::Constant pnnx_2976 0 1 4549 value=8 prim::Constant pnnx_2977 0 1 4550 value=0 prim::Constant pnnx_2978 0 1 4551 value=2 prim::Constant pnnx_2979 0 1 4552 value=1 prim::Constant pnnx_2980 0 1 4553 value=3 prim::Constant pnnx_2981 0 1 4554 value=8 prim::Constant pnnx_2982 0 1 4555 value=4 prim::Constant pnnx_2983 0 1 4556 value=5 prim::Constant pnnx_2984 0 1 4557 value=-1 prim::Constant pnnx_2985 0 1 4558 value=64 aten::size pnnx_2986 2 1 4547 4550 4564 #4547=(1,2304,192)f32 prim::NumToTensor pnnx_2987 1 1 4564 B.37 aten::Int pnnx_2988 1 1 B.37 4566 aten::Int pnnx_2989 1 1 B.37 4567 aten::size pnnx_2990 2 1 4547 4551 4568 #4547=(1,2304,192)f32 prim::NumToTensor pnnx_2991 1 1 4568 C.67 aten::Int pnnx_2992 1 1 C.67 4570 aten::Int pnnx_2993 1 1 C.67 4571 aten::Int pnnx_2994 1 1 C.67 4572 aten::Int pnnx_2995 1 1 C.67 4573 nn.LayerNorm layers_dfe.2.residual_group.blocks.2.norm1 1 1 4547 4574 elementwise_affine=True eps=1.000000e-05 
normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #4547=(1,2304,192)f32 #4574=(1,2304,192)f32 prim::ListConstruct pnnx_2996 4 1 4567 307 547 4573 4575 prim::Constant pnnx_2998 0 1 20920 value=0 Tensor.view Tensor.view_1130 2 1 4574 4575 x.31 $input=4574 $shape=4575 #4574=(1,2304,192)f32 #x.31=(1,48,48,192)f32 aten::size pnnx_2999 2 1 x.31 20920 4577 #x.31=(1,48,48,192)f32 prim::NumToTensor pnnx_3000 1 1 4577 B0.31 aten::Int pnnx_3001 1 1 B0.31 4579 aten::size pnnx_3002 2 1 x.31 4552 4580 #x.31=(1,48,48,192)f32 prim::NumToTensor pnnx_3003 1 1 4580 4581 prim::Constant pnnx_3004 0 1 20921 value=2 aten::size pnnx_3005 2 1 x.31 20921 4582 #x.31=(1,48,48,192)f32 prim::NumToTensor pnnx_3006 1 1 4582 4583 aten::size pnnx_3007 2 1 x.31 4553 4584 #x.31=(1,48,48,192)f32 prim::NumToTensor pnnx_3008 1 1 4584 C0.31 aten::Int pnnx_3009 1 1 C0.31 4586 aten::Int pnnx_3010 1 1 C0.31 4587 aten::div pnnx_3011 3 1 4581 4549 4548 4588 aten::Int pnnx_3012 1 1 4588 4589 prim::Constant pnnx_3013 0 1 20922 value=8 prim::Constant pnnx_3014 0 1 20923 value=trunc aten::div pnnx_3015 3 1 4583 20922 20923 4590 aten::Int pnnx_3016 1 1 4590 4591 prim::Constant pnnx_3017 0 1 20924 value=8 prim::ListConstruct pnnx_3018 6 1 4579 4589 4554 4591 20924 4587 4592 prim::Constant pnnx_3020 0 1 20925 value=0 prim::Constant pnnx_3021 0 1 20926 value=1 prim::Constant pnnx_3022 0 1 20927 value=3 prim::Constant pnnx_3023 0 1 20928 value=2 prim::ListConstruct pnnx_3024 6 1 20925 20926 20927 20928 4555 4556 4594 Tensor.view Tensor.view_1131 2 1 x.31 4592 x0.31 $input=x.31 $shape=4592 #x.31=(1,48,48,192)f32 #x0.31=(1,6,8,6,8,192)f32 prim::Constant pnnx_3028 0 1 20930 value=8 prim::Constant pnnx_3029 0 1 20931 value=8 prim::ListConstruct pnnx_3030 4 1 4557 20930 20931 4586 4597 torch.permute torch.permute_2584 2 1 x0.31 4594 4595 $input=x0.31 $dims=4594 #x0.31=(1,6,8,6,8,192)f32 #4595=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_44 1 1 4595 4596 memory_format=torch.contiguous_format $input=4595 #4595=(1,6,6,8,8,192)f32 #4596=(1,6,6,8,8,192)f32 prim::Constant pnnx_3032 0 1 20932 value=-1 prim::ListConstruct pnnx_3033 3 1 20932 4558 4572 4599 prim::Constant pnnx_3035 0 1 4601 value=1.767767e-01 prim::Constant pnnx_3036 0 1 4602 value=trunc prim::Constant pnnx_3037 0 1 4603 value=6 prim::Constant pnnx_3038 0 1 4604 value=0 prim::Constant pnnx_3039 0 1 4605 value=1 prim::Constant pnnx_3040 0 1 4606 value=2 prim::Constant pnnx_3041 0 1 4607 value=3 prim::Constant pnnx_3042 0 1 4608 value=6 prim::Constant pnnx_3043 0 1 4609 value=4 prim::Constant pnnx_3044 0 1 4610 value=-2 prim::Constant pnnx_3045 0 1 4611 value=-1 prim::Constant pnnx_3046 0 1 4612 value=64 pnnx.Attribute layers_dfe.2.residual_group.blocks.2.attn 0 1 relative_position_bias_table.31 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.31=(225,6)f32 pnnx.Attribute layers_dfe.2.residual_group.blocks.2.attn 0 1 relative_position_index.31 @relative_position_index=(64,64)i64 #relative_position_index.31=(64,64)i64 Tensor.view Tensor.view_1132 2 1 4596 4597 x_windows.31 $input=4596 $shape=4597 #4596=(1,6,6,8,8,192)f32 #x_windows.31=(36,8,8,192)f32 Tensor.view Tensor.view_1133 2 1 x_windows.31 4599 x1.31 $input=x_windows.31 $shape=4599 #x_windows.31=(36,8,8,192)f32 #x1.31=(36,64,192)f32 aten::size pnnx_3047 2 1 x1.31 4604 4620 #x1.31=(36,64,192)f32 prim::NumToTensor pnnx_3048 1 1 4620 B_.31 aten::Int pnnx_3049 1 1 B_.31 4622 aten::Int pnnx_3050 1 1 B_.31 4623 aten::size pnnx_3051 2 1 x1.31 4605 4624 #x1.31=(36,64,192)f32 prim::NumToTensor pnnx_3052 1 1 
4624 N.31 aten::Int pnnx_3053 1 1 N.31 4626 aten::Int pnnx_3054 1 1 N.31 4627 aten::size pnnx_3055 2 1 x1.31 4606 4628 #x1.31=(36,64,192)f32 prim::NumToTensor pnnx_3056 1 1 4628 C.69 aten::Int pnnx_3057 1 1 C.69 4630 nn.Linear layers_dfe.2.residual_group.blocks.2.attn.qkv 1 1 x1.31 4631 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.31=(36,64,192)f32 #4631=(36,64,576)f32 aten::div pnnx_3058 3 1 C.69 4603 4602 4632 aten::Int pnnx_3059 1 1 4632 4633 prim::ListConstruct pnnx_3060 5 1 4623 4627 4607 4608 4633 4634 prim::Constant pnnx_3062 0 1 20933 value=2 prim::Constant pnnx_3063 0 1 20934 value=0 prim::Constant pnnx_3064 0 1 20935 value=3 prim::Constant pnnx_3065 0 1 20936 value=1 prim::ListConstruct pnnx_3066 5 1 20933 20934 20935 20936 4609 4636 Tensor.reshape Tensor.reshape_462 2 1 4631 4634 4635 $input=4631 $shape=4634 #4631=(36,64,576)f32 #4635=(36,64,3,6,32)f32 prim::Constant pnnx_3068 0 1 20937 value=0 prim::Constant pnnx_3069 0 1 20938 value=0 prim::Constant pnnx_3071 0 1 20939 value=0 prim::Constant pnnx_3072 0 1 20940 value=1 prim::Constant pnnx_3074 0 1 20941 value=0 prim::Constant pnnx_3075 0 1 20942 value=2 torch.permute torch.permute_2585 2 1 4635 4636 qkv0.31 $input=4635 $dims=4636 #4635=(36,64,3,6,32)f32 #qkv0.31=(3,36,6,64,32)f32 Tensor.select Tensor.select_692 3 1 qkv0.31 20937 20938 q.31 $input=qkv0.31 $dim=20937 $index=20938 #qkv0.31=(3,36,6,64,32)f32 #q.31=(36,6,64,32)f32 aten::mul pnnx_3077 2 1 q.31 4601 q0.31 #q.31=(36,6,64,32)f32 #q0.31=(36,6,64,32)f32 Tensor.select Tensor.select_693 3 1 qkv0.31 20939 20940 k.31 $input=qkv0.31 $dim=20939 $index=20940 #qkv0.31=(3,36,6,64,32)f32 #k.31=(36,6,64,32)f32 prim::Constant pnnx_3080 0 1 20943 value=-1 prim::ListConstruct pnnx_3081 1 1 20943 4644 Tensor.view Tensor.view_1134 2 1 relative_position_index.31 4644 4645 $input=relative_position_index.31 $shape=4644 #relative_position_index.31=(64,64)i64 #4645=(4096)i64 prim::ListConstruct pnnx_3083 1 1 4645 4646 #4645=(4096)i64 prim::Constant pnnx_3085 0 1 20944 value=64 prim::Constant pnnx_3086 0 1 20945 value=-1 prim::ListConstruct pnnx_3087 3 1 4612 20944 20945 4648 Tensor.index Tensor.index_340 2 1 relative_position_bias_table.31 4646 4647 $input=relative_position_bias_table.31 $expr=4646 #relative_position_bias_table.31=(225,6)f32 #4647=(4096,6)f32 prim::Constant pnnx_3089 0 1 20946 value=2 prim::Constant pnnx_3090 0 1 20947 value=0 prim::Constant pnnx_3091 0 1 20948 value=1 prim::ListConstruct pnnx_3092 3 1 20946 20947 20948 4650 Tensor.view Tensor.view_1135 2 1 4647 4648 relative_position_bias.31 $input=4647 $shape=4648 #4647=(4096,6)f32 #relative_position_bias.31=(64,64,6)f32 prim::Constant pnnx_3096 0 1 20950 value=0 torch.permute torch.permute_2586 2 1 relative_position_bias.31 4650 4651 $input=relative_position_bias.31 $dims=4650 #relative_position_bias.31=(64,64,6)f32 #4651=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_45 1 1 4651 relative_position_bias0.31 memory_format=torch.contiguous_format $input=4651 #4651=(6,64,64)f32 #relative_position_bias0.31=(6,64,64)f32 prim::Constant pnnx_3098 0 1 20951 value=1 torch.transpose torch.transpose_2993 3 1 k.31 4610 4611 4642 $input=k.31 $dim0=4610 $dim1=4611 #k.31=(36,6,64,32)f32 #4642=(36,6,32,64)f32 torch.matmul torch.matmul_2232 2 1 q0.31 4642 attn.63 $input=q0.31 $other=4642 #q0.31=(36,6,64,32)f32 #4642=(36,6,32,64)f32 #attn.63=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3246 2 1 relative_position_bias0.31 20950 4653 $input=relative_position_bias0.31 $dim=20950 
#relative_position_bias0.31=(6,64,64)f32 #4653=(1,6,64,64)f32 aten::add pnnx_3099 3 1 attn.63 4653 20951 input.71 #attn.63=(36,6,64,64)f32 #4653=(1,6,64,64)f32 #input.71=(36,6,64,64)f32 nn.Softmax layers_dfe.2.residual_group.blocks.2.attn.softmax 1 1 input.71 4655 dim=-1 #input.71=(36,6,64,64)f32 #4655=(36,6,64,64)f32 nn.Dropout layers_dfe.2.residual_group.blocks.2.attn.attn_drop 1 1 4655 4656 #4655=(36,6,64,64)f32 #4656=(36,6,64,64)f32 Tensor.select Tensor.select_694 3 1 qkv0.31 20941 20942 v.31 $input=qkv0.31 $dim=20941 $index=20942 #qkv0.31=(3,36,6,64,32)f32 #v.31=(36,6,64,32)f32 prim::Constant pnnx_3101 0 1 20952 value=1 prim::Constant pnnx_3102 0 1 20953 value=2 torch.matmul torch.matmul_2233 2 1 4656 v.31 4657 $input=4656 $other=v.31 #4656=(36,6,64,64)f32 #v.31=(36,6,64,32)f32 #4657=(36,6,64,32)f32 prim::ListConstruct pnnx_3104 3 1 4622 4626 4630 4659 torch.transpose torch.transpose_2994 3 1 4657 20952 20953 4658 $input=4657 $dim0=20952 $dim1=20953 #4657=(36,6,64,32)f32 #4658=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_463 2 1 4658 4659 input0.33 $input=4658 $shape=4659 #4658=(36,64,6,32)f32 #input0.33=(36,64,192)f32 nn.Linear layers_dfe.2.residual_group.blocks.2.attn.proj 1 1 input0.33 4661 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.33=(36,64,192)f32 #4661=(36,64,192)f32 nn.Dropout layers_dfe.2.residual_group.blocks.2.attn.proj_drop 1 1 4661 4662 #4661=(36,64,192)f32 #4662=(36,64,192)f32 prim::Constant pnnx_3106 0 1 20954 value=-1 prim::Constant pnnx_3107 0 1 20955 value=8 prim::Constant pnnx_3108 0 1 20956 value=8 prim::ListConstruct pnnx_3109 4 1 20954 20955 20956 4571 4663 prim::Constant pnnx_3111 0 1 20957 value=8 prim::Constant pnnx_3112 0 1 20958 value=trunc aten::div pnnx_3113 3 1 H.1 20957 20958 4665 aten::Int pnnx_3114 1 1 4665 4666 prim::Constant pnnx_3115 0 1 20959 value=8 prim::Constant pnnx_3116 0 1 20960 value=trunc aten::div pnnx_3117 3 1 W.1 20959 20960 4667 aten::Int pnnx_3118 1 1 4667 4668 prim::Constant pnnx_3119 0 1 20961 value=1 prim::Constant pnnx_3120 0 1 20962 value=8 prim::Constant pnnx_3121 0 1 20963 value=8 prim::Constant pnnx_3122 0 1 20964 value=-1 prim::ListConstruct pnnx_3123 6 1 20961 4666 4668 20962 20963 20964 4669 prim::Constant pnnx_3125 0 1 20965 value=0 prim::Constant pnnx_3126 0 1 20966 value=1 prim::Constant pnnx_3127 0 1 20967 value=3 prim::Constant pnnx_3128 0 1 20968 value=2 prim::Constant pnnx_3129 0 1 20969 value=4 prim::Constant pnnx_3130 0 1 20970 value=5 prim::ListConstruct pnnx_3131 6 1 20965 20966 20967 20968 20969 20970 4671 Tensor.view Tensor.view_1136 2 1 4662 4663 windows.31 $input=4662 $shape=4663 #4662=(36,64,192)f32 #windows.31=(36,8,8,192)f32 Tensor.view Tensor.view_1137 2 1 windows.31 4669 x2.31 $input=windows.31 $shape=4669 #windows.31=(36,8,8,192)f32 #x2.31=(1,6,6,8,8,192)f32 prim::Constant pnnx_3135 0 1 20972 value=1 prim::Constant pnnx_3136 0 1 20973 value=-1 prim::ListConstruct pnnx_3137 4 1 20972 304 544 20973 4674 torch.permute torch.permute_2587 2 1 x2.31 4671 4672 $input=x2.31 $dims=4671 #x2.31=(1,6,6,8,8,192)f32 #4672=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_46 1 1 4672 4673 memory_format=torch.contiguous_format $input=4672 #4672=(1,6,8,6,8,192)f32 #4673=(1,6,8,6,8,192)f32 aten::mul pnnx_3139 2 1 H.1 W.1 4676 aten::Int pnnx_3140 1 1 4676 4677 prim::ListConstruct pnnx_3141 3 1 4566 4677 4570 4678 prim::Constant pnnx_3143 0 1 4680 value=None prim::Constant pnnx_3144 0 1 20974 value=1 Tensor.view Tensor.view_1138 2 1 4673 4674 x3.31 $input=4673 $shape=4674 
#4673=(1,6,8,6,8,192)f32 #x3.31=(1,48,48,192)f32 Tensor.view Tensor.view_1139 2 1 x3.31 4678 x4.31 $input=x3.31 $shape=4678 #x3.31=(1,48,48,192)f32 #x4.31=(1,2304,192)f32 aten::add pnnx_3145 3 1 4547 x4.31 20974 input.73 #4547=(1,2304,192)f32 #x4.31=(1,2304,192)f32 #input.73=(1,2304,192)f32 nn.LayerNorm layers_dfe.2.residual_group.blocks.2.norm2 1 1 input.73 4682 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.73=(1,2304,192)f32 #4682=(1,2304,192)f32 nn.Linear layers_dfe.2.residual_group.blocks.2.mlp.fc1 1 1 4682 4687 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #4682=(1,2304,192)f32 #4687=(1,2304,384)f32 nn.GELU layers_dfe.2.residual_group.blocks.2.mlp.act 1 1 4687 4688 #4687=(1,2304,384)f32 #4688=(1,2304,384)f32 nn.Dropout layers_dfe.2.residual_group.blocks.2.mlp.drop 1 1 4688 4689 #4688=(1,2304,384)f32 #4689=(1,2304,384)f32 nn.Linear layers_dfe.2.residual_group.blocks.2.mlp.fc2 1 1 4689 4690 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #4689=(1,2304,384)f32 #4690=(1,2304,192)f32 nn.Dropout layers_dfe.2.residual_group.blocks.2.mlp.drop 1 1 4690 4691 #4690=(1,2304,192)f32 #4691=(1,2304,192)f32 prim::Constant pnnx_3146 0 1 4692 value=None prim::Constant pnnx_3147 0 1 20975 value=1 aten::add pnnx_3148 3 1 input.73 4691 20975 4693 #input.73=(1,2304,192)f32 #4691=(1,2304,192)f32 #4693=(1,2304,192)f32 prim::Constant pnnx_3149 0 1 4694 value=trunc prim::Constant pnnx_3150 0 1 4695 value=8 prim::Constant pnnx_3151 0 1 4696 value=0 prim::Constant pnnx_3152 0 1 4697 value=2 prim::Constant pnnx_3153 0 1 4698 value=-4 prim::Constant pnnx_3154 0 1 4699 value=1 prim::Constant pnnx_3155 0 1 4700 value=3 prim::Constant pnnx_3156 0 1 4701 value=8 prim::Constant pnnx_3157 0 1 4702 value=4 prim::Constant pnnx_3158 0 1 4703 value=5 prim::Constant pnnx_3159 0 1 4704 value=-1 prim::Constant pnnx_3160 0 1 4705 value=64 pnnx.Attribute layers_dfe.2.residual_group.blocks.3 0 1 attn_mask.17 @attn_mask=(36,64,64)f32 #attn_mask.17=(36,64,64)f32 aten::size pnnx_3161 2 1 4693 4696 4712 #4693=(1,2304,192)f32 prim::NumToTensor pnnx_3162 1 1 4712 B.39 aten::Int pnnx_3163 1 1 B.39 4714 aten::Int pnnx_3164 1 1 B.39 4715 aten::size pnnx_3165 2 1 4693 4697 4716 #4693=(1,2304,192)f32 prim::NumToTensor pnnx_3166 1 1 4716 C.71 aten::Int pnnx_3167 1 1 C.71 4718 aten::Int pnnx_3168 1 1 C.71 4719 aten::Int pnnx_3169 1 1 C.71 4720 aten::Int pnnx_3170 1 1 C.71 4721 nn.LayerNorm layers_dfe.2.residual_group.blocks.3.norm1 1 1 4693 4722 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #4693=(1,2304,192)f32 #4722=(1,2304,192)f32 prim::ListConstruct pnnx_3171 4 1 4715 301 541 4721 4723 prim::Constant pnnx_3173 0 1 20976 value=-4 prim::ListConstruct pnnx_3174 2 1 4698 20976 4725 prim::Constant pnnx_3175 0 1 20977 value=2 prim::ListConstruct pnnx_3176 2 1 4699 20977 4726 Tensor.view Tensor.view_1140 2 1 4722 4723 x.33 $input=4722 $shape=4723 #4722=(1,2304,192)f32 #x.33=(1,48,48,192)f32 prim::Constant pnnx_3178 0 1 20978 value=0 torch.roll torch.roll_2434 3 1 x.33 4725 4726 x0.33 $input=x.33 $shifts=4725 $dims=4726 #x.33=(1,48,48,192)f32 #x0.33=(1,48,48,192)f32 aten::size pnnx_3179 2 1 x0.33 20978 4728 #x0.33=(1,48,48,192)f32 prim::NumToTensor pnnx_3180 1 1 4728 B0.33 aten::Int pnnx_3181 1 1 B0.33 4730 prim::Constant pnnx_3182 0 1 20979 value=1 aten::size pnnx_3183 2 1 x0.33 20979 4731 #x0.33=(1,48,48,192)f32 prim::NumToTensor pnnx_3184 1 1 4731 4732 prim::Constant 
pnnx_3185 0 1 20980 value=2 aten::size pnnx_3186 2 1 x0.33 20980 4733 #x0.33=(1,48,48,192)f32 prim::NumToTensor pnnx_3187 1 1 4733 4734 aten::size pnnx_3188 2 1 x0.33 4700 4735 #x0.33=(1,48,48,192)f32 prim::NumToTensor pnnx_3189 1 1 4735 C0.33 aten::Int pnnx_3190 1 1 C0.33 4737 aten::Int pnnx_3191 1 1 C0.33 4738 aten::div pnnx_3192 3 1 4732 4695 4694 4739 aten::Int pnnx_3193 1 1 4739 4740 prim::Constant pnnx_3194 0 1 20981 value=8 prim::Constant pnnx_3195 0 1 20982 value=trunc aten::div pnnx_3196 3 1 4734 20981 20982 4741 aten::Int pnnx_3197 1 1 4741 4742 prim::Constant pnnx_3198 0 1 20983 value=8 prim::ListConstruct pnnx_3199 6 1 4730 4740 4701 4742 20983 4738 4743 prim::Constant pnnx_3201 0 1 20984 value=0 prim::Constant pnnx_3202 0 1 20985 value=1 prim::Constant pnnx_3203 0 1 20986 value=3 prim::Constant pnnx_3204 0 1 20987 value=2 prim::ListConstruct pnnx_3205 6 1 20984 20985 20986 20987 4702 4703 4745 Tensor.view Tensor.view_1141 2 1 x0.33 4743 x1.33 $input=x0.33 $shape=4743 #x0.33=(1,48,48,192)f32 #x1.33=(1,6,8,6,8,192)f32 prim::Constant pnnx_3209 0 1 20989 value=8 prim::Constant pnnx_3210 0 1 20990 value=8 prim::ListConstruct pnnx_3211 4 1 4704 20989 20990 4737 4748 torch.permute torch.permute_2588 2 1 x1.33 4745 4746 $input=x1.33 $dims=4745 #x1.33=(1,6,8,6,8,192)f32 #4746=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_47 1 1 4746 4747 memory_format=torch.contiguous_format $input=4746 #4746=(1,6,6,8,8,192)f32 #4747=(1,6,6,8,8,192)f32 prim::Constant pnnx_3213 0 1 20991 value=-1 prim::ListConstruct pnnx_3214 3 1 20991 4705 4720 4750 prim::Constant pnnx_3216 0 1 4752 value=1.767767e-01 prim::Constant pnnx_3217 0 1 4753 value=trunc prim::Constant pnnx_3218 0 1 4754 value=6 prim::Constant pnnx_3219 0 1 4755 value=0 prim::Constant pnnx_3220 0 1 4756 value=1 prim::Constant pnnx_3221 0 1 4757 value=2 prim::Constant pnnx_3222 0 1 4758 value=3 prim::Constant pnnx_3223 0 1 4759 value=6 prim::Constant pnnx_3224 0 1 4760 value=4 prim::Constant pnnx_3225 0 1 4761 value=-2 prim::Constant pnnx_3226 0 1 4762 value=-1 prim::Constant pnnx_3227 0 1 4763 value=64 pnnx.Attribute layers_dfe.2.residual_group.blocks.3.attn 0 1 relative_position_bias_table.33 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.33=(225,6)f32 pnnx.Attribute layers_dfe.2.residual_group.blocks.3.attn 0 1 relative_position_index.33 @relative_position_index=(64,64)i64 #relative_position_index.33=(64,64)i64 Tensor.view Tensor.view_1142 2 1 4747 4748 x_windows.33 $input=4747 $shape=4748 #4747=(1,6,6,8,8,192)f32 #x_windows.33=(36,8,8,192)f32 Tensor.view Tensor.view_1143 2 1 x_windows.33 4750 x2.33 $input=x_windows.33 $shape=4750 #x_windows.33=(36,8,8,192)f32 #x2.33=(36,64,192)f32 aten::size pnnx_3228 2 1 x2.33 4755 4771 #x2.33=(36,64,192)f32 prim::NumToTensor pnnx_3229 1 1 4771 B_.33 aten::Int pnnx_3230 1 1 B_.33 4773 aten::Int pnnx_3231 1 1 B_.33 4774 aten::size pnnx_3232 2 1 x2.33 4756 4775 #x2.33=(36,64,192)f32 prim::NumToTensor pnnx_3233 1 1 4775 N.33 aten::Int pnnx_3234 1 1 N.33 4777 aten::Int pnnx_3235 1 1 N.33 4778 aten::Int pnnx_3236 1 1 N.33 4779 aten::Int pnnx_3237 1 1 N.33 4780 aten::Int pnnx_3238 1 1 N.33 4781 aten::Int pnnx_3239 1 1 N.33 4782 aten::size pnnx_3240 2 1 x2.33 4757 4783 #x2.33=(36,64,192)f32 prim::NumToTensor pnnx_3241 1 1 4783 C.73 aten::Int pnnx_3242 1 1 C.73 4785 nn.Linear layers_dfe.2.residual_group.blocks.3.attn.qkv 1 1 x2.33 4786 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.33=(36,64,192)f32 #4786=(36,64,576)f32 aten::div pnnx_3243 3 1 
C.73 4754 4753 4787 aten::Int pnnx_3244 1 1 4787 4788 prim::ListConstruct pnnx_3245 5 1 4774 4782 4758 4759 4788 4789 prim::Constant pnnx_3247 0 1 20992 value=2 prim::Constant pnnx_3248 0 1 20993 value=0 prim::Constant pnnx_3249 0 1 20994 value=3 prim::Constant pnnx_3250 0 1 20995 value=1 prim::ListConstruct pnnx_3251 5 1 20992 20993 20994 20995 4760 4791 Tensor.reshape Tensor.reshape_464 2 1 4786 4789 4790 $input=4786 $shape=4789 #4786=(36,64,576)f32 #4790=(36,64,3,6,32)f32 prim::Constant pnnx_3253 0 1 20996 value=0 prim::Constant pnnx_3254 0 1 20997 value=0 prim::Constant pnnx_3256 0 1 20998 value=0 prim::Constant pnnx_3257 0 1 20999 value=1 prim::Constant pnnx_3259 0 1 21000 value=0 prim::Constant pnnx_3260 0 1 21001 value=2 torch.permute torch.permute_2589 2 1 4790 4791 qkv0.33 $input=4790 $dims=4791 #4790=(36,64,3,6,32)f32 #qkv0.33=(3,36,6,64,32)f32 Tensor.select Tensor.select_695 3 1 qkv0.33 20996 20997 q.33 $input=qkv0.33 $dim=20996 $index=20997 #qkv0.33=(3,36,6,64,32)f32 #q.33=(36,6,64,32)f32 aten::mul pnnx_3262 2 1 q.33 4752 q0.33 #q.33=(36,6,64,32)f32 #q0.33=(36,6,64,32)f32 Tensor.select Tensor.select_696 3 1 qkv0.33 20998 20999 k.33 $input=qkv0.33 $dim=20998 $index=20999 #qkv0.33=(3,36,6,64,32)f32 #k.33=(36,6,64,32)f32 prim::Constant pnnx_3265 0 1 21002 value=-1 prim::ListConstruct pnnx_3266 1 1 21002 4799 Tensor.view Tensor.view_1144 2 1 relative_position_index.33 4799 4800 $input=relative_position_index.33 $shape=4799 #relative_position_index.33=(64,64)i64 #4800=(4096)i64 prim::ListConstruct pnnx_3268 1 1 4800 4801 #4800=(4096)i64 prim::Constant pnnx_3270 0 1 21003 value=64 prim::Constant pnnx_3271 0 1 21004 value=-1 prim::ListConstruct pnnx_3272 3 1 4763 21003 21004 4803 Tensor.index Tensor.index_341 2 1 relative_position_bias_table.33 4801 4802 $input=relative_position_bias_table.33 $expr=4801 #relative_position_bias_table.33=(225,6)f32 #4802=(4096,6)f32 prim::Constant pnnx_3274 0 1 21005 value=2 prim::Constant pnnx_3275 0 1 21006 value=0 prim::Constant pnnx_3276 0 1 21007 value=1 prim::ListConstruct pnnx_3277 3 1 21005 21006 21007 4805 Tensor.view Tensor.view_1145 2 1 4802 4803 relative_position_bias.33 $input=4802 $shape=4803 #4802=(4096,6)f32 #relative_position_bias.33=(64,64,6)f32 prim::Constant pnnx_3281 0 1 21009 value=0 torch.permute torch.permute_2590 2 1 relative_position_bias.33 4805 4806 $input=relative_position_bias.33 $dims=4805 #relative_position_bias.33=(64,64,6)f32 #4806=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_48 1 1 4806 relative_position_bias0.33 memory_format=torch.contiguous_format $input=4806 #4806=(6,64,64)f32 #relative_position_bias0.33=(6,64,64)f32 prim::Constant pnnx_3283 0 1 21010 value=1 torch.transpose torch.transpose_2995 3 1 k.33 4761 4762 4797 $input=k.33 $dim0=4761 $dim1=4762 #k.33=(36,6,64,32)f32 #4797=(36,6,32,64)f32 torch.matmul torch.matmul_2234 2 1 q0.33 4797 attn.67 $input=q0.33 $other=4797 #q0.33=(36,6,64,32)f32 #4797=(36,6,32,64)f32 #attn.67=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3247 2 1 relative_position_bias0.33 21009 4808 $input=relative_position_bias0.33 $dim=21009 #relative_position_bias0.33=(6,64,64)f32 #4808=(1,6,64,64)f32 aten::add pnnx_3284 3 1 attn.67 4808 21010 attn0.17 #attn.67=(36,6,64,64)f32 #4808=(1,6,64,64)f32 #attn0.17=(36,6,64,64)f32 prim::Constant pnnx_3285 0 1 21011 value=0 aten::size pnnx_3286 2 1 attn_mask.17 21011 4810 #attn_mask.17=(36,64,64)f32 prim::NumToTensor pnnx_3287 1 1 4810 other.17 aten::Int pnnx_3288 1 1 other.17 4812 prim::Constant pnnx_3289 0 1 21012 value=trunc aten::div pnnx_3290 
3 1 B_.33 other.17 21012 4813 aten::Int pnnx_3291 1 1 4813 4814 prim::Constant pnnx_3292 0 1 21013 value=6 prim::ListConstruct pnnx_3293 5 1 4814 4812 21013 4781 4780 4815 prim::Constant pnnx_3295 0 1 21014 value=1 prim::Constant pnnx_3297 0 1 21015 value=0 prim::Constant pnnx_3299 0 1 21016 value=1 Tensor.view Tensor.view_1146 2 1 attn0.17 4815 4816 $input=attn0.17 $shape=4815 #attn0.17=(36,6,64,64)f32 #4816=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3248 2 1 attn_mask.17 21014 4817 $input=attn_mask.17 $dim=21014 #attn_mask.17=(36,64,64)f32 #4817=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3249 2 1 4817 21015 4818 $input=4817 $dim=21015 #4817=(36,1,64,64)f32 #4818=(1,36,1,64,64)f32 aten::add pnnx_3300 3 1 4816 4818 21016 attn1.17 #4816=(1,36,6,64,64)f32 #4818=(1,36,1,64,64)f32 #attn1.17=(1,36,6,64,64)f32 prim::Constant pnnx_3301 0 1 21017 value=-1 prim::Constant pnnx_3302 0 1 21018 value=6 prim::ListConstruct pnnx_3303 4 1 21017 21018 4779 4778 4820 Tensor.view Tensor.view_1147 2 1 attn1.17 4820 input.75 $input=attn1.17 $shape=4820 #attn1.17=(1,36,6,64,64)f32 #input.75=(36,6,64,64)f32 nn.Softmax layers_dfe.2.residual_group.blocks.3.attn.softmax 1 1 input.75 4822 dim=-1 #input.75=(36,6,64,64)f32 #4822=(36,6,64,64)f32 nn.Dropout layers_dfe.2.residual_group.blocks.3.attn.attn_drop 1 1 4822 4823 #4822=(36,6,64,64)f32 #4823=(36,6,64,64)f32 Tensor.select Tensor.select_697 3 1 qkv0.33 21000 21001 v.33 $input=qkv0.33 $dim=21000 $index=21001 #qkv0.33=(3,36,6,64,32)f32 #v.33=(36,6,64,32)f32 prim::Constant pnnx_3306 0 1 21019 value=1 prim::Constant pnnx_3307 0 1 21020 value=2 torch.matmul torch.matmul_2235 2 1 4823 v.33 4824 $input=4823 $other=v.33 #4823=(36,6,64,64)f32 #v.33=(36,6,64,32)f32 #4824=(36,6,64,32)f32 prim::ListConstruct pnnx_3309 3 1 4773 4777 4785 4826 torch.transpose torch.transpose_2996 3 1 4824 21019 21020 4825 $input=4824 $dim0=21019 $dim1=21020 #4824=(36,6,64,32)f32 #4825=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_465 2 1 4825 4826 input0.35 $input=4825 $shape=4826 #4825=(36,64,6,32)f32 #input0.35=(36,64,192)f32 nn.Linear layers_dfe.2.residual_group.blocks.3.attn.proj 1 1 input0.35 4828 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.35=(36,64,192)f32 #4828=(36,64,192)f32 nn.Dropout layers_dfe.2.residual_group.blocks.3.attn.proj_drop 1 1 4828 4829 #4828=(36,64,192)f32 #4829=(36,64,192)f32 prim::Constant pnnx_3311 0 1 21021 value=-1 prim::Constant pnnx_3312 0 1 21022 value=8 prim::Constant pnnx_3313 0 1 21023 value=8 prim::ListConstruct pnnx_3314 4 1 21021 21022 21023 4719 4830 prim::Constant pnnx_3316 0 1 21024 value=8 prim::Constant pnnx_3317 0 1 21025 value=trunc aten::div pnnx_3318 3 1 H.1 21024 21025 4832 aten::Int pnnx_3319 1 1 4832 4833 prim::Constant pnnx_3320 0 1 21026 value=8 prim::Constant pnnx_3321 0 1 21027 value=trunc aten::div pnnx_3322 3 1 W.1 21026 21027 4834 aten::Int pnnx_3323 1 1 4834 4835 prim::Constant pnnx_3324 0 1 21028 value=1 prim::Constant pnnx_3325 0 1 21029 value=8 prim::Constant pnnx_3326 0 1 21030 value=8 prim::Constant pnnx_3327 0 1 21031 value=-1 prim::ListConstruct pnnx_3328 6 1 21028 4833 4835 21029 21030 21031 4836 prim::Constant pnnx_3330 0 1 21032 value=0 prim::Constant pnnx_3331 0 1 21033 value=1 prim::Constant pnnx_3332 0 1 21034 value=3 prim::Constant pnnx_3333 0 1 21035 value=2 prim::Constant pnnx_3334 0 1 21036 value=4 prim::Constant pnnx_3335 0 1 21037 value=5 prim::ListConstruct pnnx_3336 6 1 21032 21033 21034 21035 21036 21037 4838 Tensor.view Tensor.view_1148 2 1 4829 4830 windows.33 
$input=4829 $shape=4830 #4829=(36,64,192)f32 #windows.33=(36,8,8,192)f32 Tensor.view Tensor.view_1149 2 1 windows.33 4836 x3.33 $input=windows.33 $shape=4836 #windows.33=(36,8,8,192)f32 #x3.33=(1,6,6,8,8,192)f32 prim::Constant pnnx_3340 0 1 21039 value=1 prim::Constant pnnx_3341 0 1 21040 value=-1 prim::ListConstruct pnnx_3342 4 1 21039 298 538 21040 4841 torch.permute torch.permute_2591 2 1 x3.33 4838 4839 $input=x3.33 $dims=4838 #x3.33=(1,6,6,8,8,192)f32 #4839=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_49 1 1 4839 4840 memory_format=torch.contiguous_format $input=4839 #4839=(1,6,8,6,8,192)f32 #4840=(1,6,8,6,8,192)f32 prim::Constant pnnx_3344 0 1 21041 value=4 prim::Constant pnnx_3345 0 1 21042 value=4 prim::ListConstruct pnnx_3346 2 1 21041 21042 4843 prim::Constant pnnx_3347 0 1 21043 value=1 prim::Constant pnnx_3348 0 1 21044 value=2 prim::ListConstruct pnnx_3349 2 1 21043 21044 4844 Tensor.view Tensor.view_1150 2 1 4840 4841 shifted_x.17 $input=4840 $shape=4841 #4840=(1,6,8,6,8,192)f32 #shifted_x.17=(1,48,48,192)f32 aten::mul pnnx_3351 2 1 H.1 W.1 4846 aten::Int pnnx_3352 1 1 4846 4847 prim::ListConstruct pnnx_3353 3 1 4714 4847 4718 4848 prim::Constant pnnx_3355 0 1 4850 value=None prim::Constant pnnx_3356 0 1 21045 value=1 torch.roll torch.roll_2435 3 1 shifted_x.17 4843 4844 x4.33 $input=shifted_x.17 $shifts=4843 $dims=4844 #shifted_x.17=(1,48,48,192)f32 #x4.33=(1,48,48,192)f32 Tensor.view Tensor.view_1151 2 1 x4.33 4848 x5.17 $input=x4.33 $shape=4848 #x4.33=(1,48,48,192)f32 #x5.17=(1,2304,192)f32 aten::add pnnx_3357 3 1 4693 x5.17 21045 input.77 #4693=(1,2304,192)f32 #x5.17=(1,2304,192)f32 #input.77=(1,2304,192)f32 nn.LayerNorm layers_dfe.2.residual_group.blocks.3.norm2 1 1 input.77 4852 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.77=(1,2304,192)f32 #4852=(1,2304,192)f32 nn.Linear layers_dfe.2.residual_group.blocks.3.mlp.fc1 1 1 4852 4857 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #4852=(1,2304,192)f32 #4857=(1,2304,384)f32 nn.GELU layers_dfe.2.residual_group.blocks.3.mlp.act 1 1 4857 4858 #4857=(1,2304,384)f32 #4858=(1,2304,384)f32 nn.Dropout layers_dfe.2.residual_group.blocks.3.mlp.drop 1 1 4858 4859 #4858=(1,2304,384)f32 #4859=(1,2304,384)f32 nn.Linear layers_dfe.2.residual_group.blocks.3.mlp.fc2 1 1 4859 4860 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #4859=(1,2304,384)f32 #4860=(1,2304,192)f32 nn.Dropout layers_dfe.2.residual_group.blocks.3.mlp.drop 1 1 4860 4861 #4860=(1,2304,192)f32 #4861=(1,2304,192)f32 prim::Constant pnnx_3358 0 1 4862 value=None prim::Constant pnnx_3359 0 1 21046 value=1 aten::add pnnx_3360 3 1 input.77 4861 21046 4863 #input.77=(1,2304,192)f32 #4861=(1,2304,192)f32 #4863=(1,2304,192)f32 prim::Constant pnnx_3361 0 1 4864 value=trunc prim::Constant pnnx_3362 0 1 4865 value=8 prim::Constant pnnx_3363 0 1 4866 value=0 prim::Constant pnnx_3364 0 1 4867 value=2 prim::Constant pnnx_3365 0 1 4868 value=1 prim::Constant pnnx_3366 0 1 4869 value=3 prim::Constant pnnx_3367 0 1 4870 value=8 prim::Constant pnnx_3368 0 1 4871 value=4 prim::Constant pnnx_3369 0 1 4872 value=5 prim::Constant pnnx_3370 0 1 4873 value=-1 prim::Constant pnnx_3371 0 1 4874 value=64 aten::size pnnx_3372 2 1 4863 4866 4880 #4863=(1,2304,192)f32 prim::NumToTensor pnnx_3373 1 1 4880 B.41 aten::Int pnnx_3374 1 1 B.41 4882 aten::Int pnnx_3375 1 1 B.41 4883 aten::size pnnx_3376 2 1 4863 4867 4884 #4863=(1,2304,192)f32 prim::NumToTensor pnnx_3377 1 1 4884 
C.75 aten::Int pnnx_3378 1 1 C.75 4886 aten::Int pnnx_3379 1 1 C.75 4887 aten::Int pnnx_3380 1 1 C.75 4888 aten::Int pnnx_3381 1 1 C.75 4889 nn.LayerNorm layers_dfe.2.residual_group.blocks.4.norm1 1 1 4863 4890 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #4863=(1,2304,192)f32 #4890=(1,2304,192)f32 prim::ListConstruct pnnx_3382 4 1 4883 295 535 4889 4891 prim::Constant pnnx_3384 0 1 21047 value=0 Tensor.view Tensor.view_1152 2 1 4890 4891 x.35 $input=4890 $shape=4891 #4890=(1,2304,192)f32 #x.35=(1,48,48,192)f32 aten::size pnnx_3385 2 1 x.35 21047 4893 #x.35=(1,48,48,192)f32 prim::NumToTensor pnnx_3386 1 1 4893 B0.35 aten::Int pnnx_3387 1 1 B0.35 4895 aten::size pnnx_3388 2 1 x.35 4868 4896 #x.35=(1,48,48,192)f32 prim::NumToTensor pnnx_3389 1 1 4896 4897 prim::Constant pnnx_3390 0 1 21048 value=2 aten::size pnnx_3391 2 1 x.35 21048 4898 #x.35=(1,48,48,192)f32 prim::NumToTensor pnnx_3392 1 1 4898 4899 aten::size pnnx_3393 2 1 x.35 4869 4900 #x.35=(1,48,48,192)f32 prim::NumToTensor pnnx_3394 1 1 4900 C0.35 aten::Int pnnx_3395 1 1 C0.35 4902 aten::Int pnnx_3396 1 1 C0.35 4903 aten::div pnnx_3397 3 1 4897 4865 4864 4904 aten::Int pnnx_3398 1 1 4904 4905 prim::Constant pnnx_3399 0 1 21049 value=8 prim::Constant pnnx_3400 0 1 21050 value=trunc aten::div pnnx_3401 3 1 4899 21049 21050 4906 aten::Int pnnx_3402 1 1 4906 4907 prim::Constant pnnx_3403 0 1 21051 value=8 prim::ListConstruct pnnx_3404 6 1 4895 4905 4870 4907 21051 4903 4908 prim::Constant pnnx_3406 0 1 21052 value=0 prim::Constant pnnx_3407 0 1 21053 value=1 prim::Constant pnnx_3408 0 1 21054 value=3 prim::Constant pnnx_3409 0 1 21055 value=2 prim::ListConstruct pnnx_3410 6 1 21052 21053 21054 21055 4871 4872 4910 Tensor.view Tensor.view_1153 2 1 x.35 4908 x0.35 $input=x.35 $shape=4908 #x.35=(1,48,48,192)f32 #x0.35=(1,6,8,6,8,192)f32 prim::Constant pnnx_3414 0 1 21057 value=8 prim::Constant pnnx_3415 0 1 21058 value=8 prim::ListConstruct pnnx_3416 4 1 4873 21057 21058 4902 4913 torch.permute torch.permute_2592 2 1 x0.35 4910 4911 $input=x0.35 $dims=4910 #x0.35=(1,6,8,6,8,192)f32 #4911=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_50 1 1 4911 4912 memory_format=torch.contiguous_format $input=4911 #4911=(1,6,6,8,8,192)f32 #4912=(1,6,6,8,8,192)f32 prim::Constant pnnx_3418 0 1 21059 value=-1 prim::ListConstruct pnnx_3419 3 1 21059 4874 4888 4915 prim::Constant pnnx_3421 0 1 4917 value=1.767767e-01 prim::Constant pnnx_3422 0 1 4918 value=trunc prim::Constant pnnx_3423 0 1 4919 value=6 prim::Constant pnnx_3424 0 1 4920 value=0 prim::Constant pnnx_3425 0 1 4921 value=1 prim::Constant pnnx_3426 0 1 4922 value=2 prim::Constant pnnx_3427 0 1 4923 value=3 prim::Constant pnnx_3428 0 1 4924 value=6 prim::Constant pnnx_3429 0 1 4925 value=4 prim::Constant pnnx_3430 0 1 4926 value=-2 prim::Constant pnnx_3431 0 1 4927 value=-1 prim::Constant pnnx_3432 0 1 4928 value=64 pnnx.Attribute layers_dfe.2.residual_group.blocks.4.attn 0 1 relative_position_bias_table.35 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.35=(225,6)f32 pnnx.Attribute layers_dfe.2.residual_group.blocks.4.attn 0 1 relative_position_index.35 @relative_position_index=(64,64)i64 #relative_position_index.35=(64,64)i64 Tensor.view Tensor.view_1154 2 1 4912 4913 x_windows.35 $input=4912 $shape=4913 #4912=(1,6,6,8,8,192)f32 #x_windows.35=(36,8,8,192)f32 Tensor.view Tensor.view_1155 2 1 x_windows.35 4915 x1.35 $input=x_windows.35 $shape=4915 #x_windows.35=(36,8,8,192)f32 #x1.35=(36,64,192)f32 aten::size pnnx_3433 
2 1 x1.35 4920 4936 #x1.35=(36,64,192)f32 prim::NumToTensor pnnx_3434 1 1 4936 B_.35 aten::Int pnnx_3435 1 1 B_.35 4938 aten::Int pnnx_3436 1 1 B_.35 4939 aten::size pnnx_3437 2 1 x1.35 4921 4940 #x1.35=(36,64,192)f32 prim::NumToTensor pnnx_3438 1 1 4940 N.35 aten::Int pnnx_3439 1 1 N.35 4942 aten::Int pnnx_3440 1 1 N.35 4943 aten::size pnnx_3441 2 1 x1.35 4922 4944 #x1.35=(36,64,192)f32 prim::NumToTensor pnnx_3442 1 1 4944 C.77 aten::Int pnnx_3443 1 1 C.77 4946 nn.Linear layers_dfe.2.residual_group.blocks.4.attn.qkv 1 1 x1.35 4947 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.35=(36,64,192)f32 #4947=(36,64,576)f32 aten::div pnnx_3444 3 1 C.77 4919 4918 4948 aten::Int pnnx_3445 1 1 4948 4949 prim::ListConstruct pnnx_3446 5 1 4939 4943 4923 4924 4949 4950 prim::Constant pnnx_3448 0 1 21060 value=2 prim::Constant pnnx_3449 0 1 21061 value=0 prim::Constant pnnx_3450 0 1 21062 value=3 prim::Constant pnnx_3451 0 1 21063 value=1 prim::ListConstruct pnnx_3452 5 1 21060 21061 21062 21063 4925 4952 Tensor.reshape Tensor.reshape_466 2 1 4947 4950 4951 $input=4947 $shape=4950 #4947=(36,64,576)f32 #4951=(36,64,3,6,32)f32 prim::Constant pnnx_3454 0 1 21064 value=0 prim::Constant pnnx_3455 0 1 21065 value=0 prim::Constant pnnx_3457 0 1 21066 value=0 prim::Constant pnnx_3458 0 1 21067 value=1 prim::Constant pnnx_3460 0 1 21068 value=0 prim::Constant pnnx_3461 0 1 21069 value=2 torch.permute torch.permute_2593 2 1 4951 4952 qkv0.35 $input=4951 $dims=4952 #4951=(36,64,3,6,32)f32 #qkv0.35=(3,36,6,64,32)f32 Tensor.select Tensor.select_698 3 1 qkv0.35 21064 21065 q.35 $input=qkv0.35 $dim=21064 $index=21065 #qkv0.35=(3,36,6,64,32)f32 #q.35=(36,6,64,32)f32 aten::mul pnnx_3463 2 1 q.35 4917 q0.35 #q.35=(36,6,64,32)f32 #q0.35=(36,6,64,32)f32 Tensor.select Tensor.select_699 3 1 qkv0.35 21066 21067 k.35 $input=qkv0.35 $dim=21066 $index=21067 #qkv0.35=(3,36,6,64,32)f32 #k.35=(36,6,64,32)f32 prim::Constant pnnx_3466 0 1 21070 value=-1 prim::ListConstruct pnnx_3467 1 1 21070 4960 Tensor.view Tensor.view_1156 2 1 relative_position_index.35 4960 4961 $input=relative_position_index.35 $shape=4960 #relative_position_index.35=(64,64)i64 #4961=(4096)i64 prim::ListConstruct pnnx_3469 1 1 4961 4962 #4961=(4096)i64 prim::Constant pnnx_3471 0 1 21071 value=64 prim::Constant pnnx_3472 0 1 21072 value=-1 prim::ListConstruct pnnx_3473 3 1 4928 21071 21072 4964 Tensor.index Tensor.index_342 2 1 relative_position_bias_table.35 4962 4963 $input=relative_position_bias_table.35 $expr=4962 #relative_position_bias_table.35=(225,6)f32 #4963=(4096,6)f32 prim::Constant pnnx_3475 0 1 21073 value=2 prim::Constant pnnx_3476 0 1 21074 value=0 prim::Constant pnnx_3477 0 1 21075 value=1 prim::ListConstruct pnnx_3478 3 1 21073 21074 21075 4966 Tensor.view Tensor.view_1157 2 1 4963 4964 relative_position_bias.35 $input=4963 $shape=4964 #4963=(4096,6)f32 #relative_position_bias.35=(64,64,6)f32 prim::Constant pnnx_3482 0 1 21077 value=0 torch.permute torch.permute_2594 2 1 relative_position_bias.35 4966 4967 $input=relative_position_bias.35 $dims=4966 #relative_position_bias.35=(64,64,6)f32 #4967=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_51 1 1 4967 relative_position_bias0.35 memory_format=torch.contiguous_format $input=4967 #4967=(6,64,64)f32 #relative_position_bias0.35=(6,64,64)f32 prim::Constant pnnx_3484 0 1 21078 value=1 torch.transpose torch.transpose_2997 3 1 k.35 4926 4927 4958 $input=k.35 $dim0=4926 $dim1=4927 #k.35=(36,6,64,32)f32 #4958=(36,6,32,64)f32 torch.matmul torch.matmul_2236 2 1 q0.35 4958 
attn.71 $input=q0.35 $other=4958 #q0.35=(36,6,64,32)f32 #4958=(36,6,32,64)f32 #attn.71=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3250 2 1 relative_position_bias0.35 21077 4969 $input=relative_position_bias0.35 $dim=21077 #relative_position_bias0.35=(6,64,64)f32 #4969=(1,6,64,64)f32 aten::add pnnx_3485 3 1 attn.71 4969 21078 input.79 #attn.71=(36,6,64,64)f32 #4969=(1,6,64,64)f32 #input.79=(36,6,64,64)f32 nn.Softmax layers_dfe.2.residual_group.blocks.4.attn.softmax 1 1 input.79 4971 dim=-1 #input.79=(36,6,64,64)f32 #4971=(36,6,64,64)f32 nn.Dropout layers_dfe.2.residual_group.blocks.4.attn.attn_drop 1 1 4971 4972 #4971=(36,6,64,64)f32 #4972=(36,6,64,64)f32 Tensor.select Tensor.select_700 3 1 qkv0.35 21068 21069 v.35 $input=qkv0.35 $dim=21068 $index=21069 #qkv0.35=(3,36,6,64,32)f32 #v.35=(36,6,64,32)f32 prim::Constant pnnx_3487 0 1 21079 value=1 prim::Constant pnnx_3488 0 1 21080 value=2 torch.matmul torch.matmul_2237 2 1 4972 v.35 4973 $input=4972 $other=v.35 #4972=(36,6,64,64)f32 #v.35=(36,6,64,32)f32 #4973=(36,6,64,32)f32 prim::ListConstruct pnnx_3490 3 1 4938 4942 4946 4975 torch.transpose torch.transpose_2998 3 1 4973 21079 21080 4974 $input=4973 $dim0=21079 $dim1=21080 #4973=(36,6,64,32)f32 #4974=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_467 2 1 4974 4975 input0.37 $input=4974 $shape=4975 #4974=(36,64,6,32)f32 #input0.37=(36,64,192)f32 nn.Linear layers_dfe.2.residual_group.blocks.4.attn.proj 1 1 input0.37 4977 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.37=(36,64,192)f32 #4977=(36,64,192)f32 nn.Dropout layers_dfe.2.residual_group.blocks.4.attn.proj_drop 1 1 4977 4978 #4977=(36,64,192)f32 #4978=(36,64,192)f32 prim::Constant pnnx_3492 0 1 21081 value=-1 prim::Constant pnnx_3493 0 1 21082 value=8 prim::Constant pnnx_3494 0 1 21083 value=8 prim::ListConstruct pnnx_3495 4 1 21081 21082 21083 4887 4979 prim::Constant pnnx_3497 0 1 21084 value=8 prim::Constant pnnx_3498 0 1 21085 value=trunc aten::div pnnx_3499 3 1 H.1 21084 21085 4981 aten::Int pnnx_3500 1 1 4981 4982 prim::Constant pnnx_3501 0 1 21086 value=8 prim::Constant pnnx_3502 0 1 21087 value=trunc aten::div pnnx_3503 3 1 W.1 21086 21087 4983 aten::Int pnnx_3504 1 1 4983 4984 prim::Constant pnnx_3505 0 1 21088 value=1 prim::Constant pnnx_3506 0 1 21089 value=8 prim::Constant pnnx_3507 0 1 21090 value=8 prim::Constant pnnx_3508 0 1 21091 value=-1 prim::ListConstruct pnnx_3509 6 1 21088 4982 4984 21089 21090 21091 4985 prim::Constant pnnx_3511 0 1 21092 value=0 prim::Constant pnnx_3512 0 1 21093 value=1 prim::Constant pnnx_3513 0 1 21094 value=3 prim::Constant pnnx_3514 0 1 21095 value=2 prim::Constant pnnx_3515 0 1 21096 value=4 prim::Constant pnnx_3516 0 1 21097 value=5 prim::ListConstruct pnnx_3517 6 1 21092 21093 21094 21095 21096 21097 4987 Tensor.view Tensor.view_1158 2 1 4978 4979 windows.35 $input=4978 $shape=4979 #4978=(36,64,192)f32 #windows.35=(36,8,8,192)f32 Tensor.view Tensor.view_1159 2 1 windows.35 4985 x2.35 $input=windows.35 $shape=4985 #windows.35=(36,8,8,192)f32 #x2.35=(1,6,6,8,8,192)f32 prim::Constant pnnx_3521 0 1 21099 value=1 prim::Constant pnnx_3522 0 1 21100 value=-1 prim::ListConstruct pnnx_3523 4 1 21099 292 532 21100 4990 torch.permute torch.permute_2595 2 1 x2.35 4987 4988 $input=x2.35 $dims=4987 #x2.35=(1,6,6,8,8,192)f32 #4988=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_52 1 1 4988 4989 memory_format=torch.contiguous_format $input=4988 #4988=(1,6,8,6,8,192)f32 #4989=(1,6,8,6,8,192)f32 aten::mul pnnx_3525 2 1 H.1 W.1 4992 aten::Int pnnx_3526 1 1 
4992 4993 prim::ListConstruct pnnx_3527 3 1 4882 4993 4886 4994 prim::Constant pnnx_3529 0 1 4996 value=None prim::Constant pnnx_3530 0 1 21101 value=1 Tensor.view Tensor.view_1160 2 1 4989 4990 x3.35 $input=4989 $shape=4990 #4989=(1,6,8,6,8,192)f32 #x3.35=(1,48,48,192)f32 Tensor.view Tensor.view_1161 2 1 x3.35 4994 x4.35 $input=x3.35 $shape=4994 #x3.35=(1,48,48,192)f32 #x4.35=(1,2304,192)f32 aten::add pnnx_3531 3 1 4863 x4.35 21101 input.81 #4863=(1,2304,192)f32 #x4.35=(1,2304,192)f32 #input.81=(1,2304,192)f32 nn.LayerNorm layers_dfe.2.residual_group.blocks.4.norm2 1 1 input.81 4998 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.81=(1,2304,192)f32 #4998=(1,2304,192)f32 nn.Linear layers_dfe.2.residual_group.blocks.4.mlp.fc1 1 1 4998 5003 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #4998=(1,2304,192)f32 #5003=(1,2304,384)f32 nn.GELU layers_dfe.2.residual_group.blocks.4.mlp.act 1 1 5003 5004 #5003=(1,2304,384)f32 #5004=(1,2304,384)f32 nn.Dropout layers_dfe.2.residual_group.blocks.4.mlp.drop 1 1 5004 5005 #5004=(1,2304,384)f32 #5005=(1,2304,384)f32 nn.Linear layers_dfe.2.residual_group.blocks.4.mlp.fc2 1 1 5005 5006 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #5005=(1,2304,384)f32 #5006=(1,2304,192)f32 nn.Dropout layers_dfe.2.residual_group.blocks.4.mlp.drop 1 1 5006 5007 #5006=(1,2304,192)f32 #5007=(1,2304,192)f32 prim::Constant pnnx_3532 0 1 5008 value=None prim::Constant pnnx_3533 0 1 21102 value=1 aten::add pnnx_3534 3 1 input.81 5007 21102 5009 #input.81=(1,2304,192)f32 #5007=(1,2304,192)f32 #5009=(1,2304,192)f32 prim::Constant pnnx_3535 0 1 5010 value=trunc prim::Constant pnnx_3536 0 1 5011 value=8 prim::Constant pnnx_3537 0 1 5012 value=0 prim::Constant pnnx_3538 0 1 5013 value=2 prim::Constant pnnx_3539 0 1 5014 value=-4 prim::Constant pnnx_3540 0 1 5015 value=1 prim::Constant pnnx_3541 0 1 5016 value=3 prim::Constant pnnx_3542 0 1 5017 value=8 prim::Constant pnnx_3543 0 1 5018 value=4 prim::Constant pnnx_3544 0 1 5019 value=5 prim::Constant pnnx_3545 0 1 5020 value=-1 prim::Constant pnnx_3546 0 1 5021 value=64 pnnx.Attribute layers_dfe.2.residual_group.blocks.5 0 1 attn_mask.19 @attn_mask=(36,64,64)f32 #attn_mask.19=(36,64,64)f32 aten::size pnnx_3547 2 1 5009 5012 5028 #5009=(1,2304,192)f32 prim::NumToTensor pnnx_3548 1 1 5028 B.43 aten::Int pnnx_3549 1 1 B.43 5030 aten::Int pnnx_3550 1 1 B.43 5031 aten::size pnnx_3551 2 1 5009 5013 5032 #5009=(1,2304,192)f32 prim::NumToTensor pnnx_3552 1 1 5032 C.79 aten::Int pnnx_3553 1 1 C.79 5034 aten::Int pnnx_3554 1 1 C.79 5035 aten::Int pnnx_3555 1 1 C.79 5036 aten::Int pnnx_3556 1 1 C.79 5037 nn.LayerNorm layers_dfe.2.residual_group.blocks.5.norm1 1 1 5009 5038 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #5009=(1,2304,192)f32 #5038=(1,2304,192)f32 prim::ListConstruct pnnx_3557 4 1 5031 289 529 5037 5039 prim::Constant pnnx_3559 0 1 21103 value=-4 prim::ListConstruct pnnx_3560 2 1 5014 21103 5041 prim::Constant pnnx_3561 0 1 21104 value=2 prim::ListConstruct pnnx_3562 2 1 5015 21104 5042 Tensor.view Tensor.view_1162 2 1 5038 5039 x.37 $input=5038 $shape=5039 #5038=(1,2304,192)f32 #x.37=(1,48,48,192)f32 prim::Constant pnnx_3564 0 1 21105 value=0 torch.roll torch.roll_2436 3 1 x.37 5041 5042 x0.37 $input=x.37 $shifts=5041 $dims=5042 #x.37=(1,48,48,192)f32 #x0.37=(1,48,48,192)f32 aten::size pnnx_3565 2 1 x0.37 21105 5044 #x0.37=(1,48,48,192)f32 prim::NumToTensor pnnx_3566 
1 1 5044 B0.37 aten::Int pnnx_3567 1 1 B0.37 5046 prim::Constant pnnx_3568 0 1 21106 value=1 aten::size pnnx_3569 2 1 x0.37 21106 5047 #x0.37=(1,48,48,192)f32 prim::NumToTensor pnnx_3570 1 1 5047 5048 prim::Constant pnnx_3571 0 1 21107 value=2 aten::size pnnx_3572 2 1 x0.37 21107 5049 #x0.37=(1,48,48,192)f32 prim::NumToTensor pnnx_3573 1 1 5049 5050 aten::size pnnx_3574 2 1 x0.37 5016 5051 #x0.37=(1,48,48,192)f32 prim::NumToTensor pnnx_3575 1 1 5051 C0.37 aten::Int pnnx_3576 1 1 C0.37 5053 aten::Int pnnx_3577 1 1 C0.37 5054 aten::div pnnx_3578 3 1 5048 5011 5010 5055 aten::Int pnnx_3579 1 1 5055 5056 prim::Constant pnnx_3580 0 1 21108 value=8 prim::Constant pnnx_3581 0 1 21109 value=trunc aten::div pnnx_3582 3 1 5050 21108 21109 5057 aten::Int pnnx_3583 1 1 5057 5058 prim::Constant pnnx_3584 0 1 21110 value=8 prim::ListConstruct pnnx_3585 6 1 5046 5056 5017 5058 21110 5054 5059 prim::Constant pnnx_3587 0 1 21111 value=0 prim::Constant pnnx_3588 0 1 21112 value=1 prim::Constant pnnx_3589 0 1 21113 value=3 prim::Constant pnnx_3590 0 1 21114 value=2 prim::ListConstruct pnnx_3591 6 1 21111 21112 21113 21114 5018 5019 5061 Tensor.view Tensor.view_1163 2 1 x0.37 5059 x1.37 $input=x0.37 $shape=5059 #x0.37=(1,48,48,192)f32 #x1.37=(1,6,8,6,8,192)f32 prim::Constant pnnx_3595 0 1 21116 value=8 prim::Constant pnnx_3596 0 1 21117 value=8 prim::ListConstruct pnnx_3597 4 1 5020 21116 21117 5053 5064 torch.permute torch.permute_2596 2 1 x1.37 5061 5062 $input=x1.37 $dims=5061 #x1.37=(1,6,8,6,8,192)f32 #5062=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_53 1 1 5062 5063 memory_format=torch.contiguous_format $input=5062 #5062=(1,6,6,8,8,192)f32 #5063=(1,6,6,8,8,192)f32 prim::Constant pnnx_3599 0 1 21118 value=-1 prim::ListConstruct pnnx_3600 3 1 21118 5021 5036 5066 prim::Constant pnnx_3602 0 1 5068 value=1.767767e-01 prim::Constant pnnx_3603 0 1 5069 value=trunc prim::Constant pnnx_3604 0 1 5070 value=6 prim::Constant pnnx_3605 0 1 5071 value=0 prim::Constant pnnx_3606 0 1 5072 value=1 prim::Constant pnnx_3607 0 1 5073 value=2 prim::Constant pnnx_3608 0 1 5074 value=3 prim::Constant pnnx_3609 0 1 5075 value=6 prim::Constant pnnx_3610 0 1 5076 value=4 prim::Constant pnnx_3611 0 1 5077 value=-2 prim::Constant pnnx_3612 0 1 5078 value=-1 prim::Constant pnnx_3613 0 1 5079 value=64 pnnx.Attribute layers_dfe.2.residual_group.blocks.5.attn 0 1 relative_position_bias_table.37 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.37=(225,6)f32 pnnx.Attribute layers_dfe.2.residual_group.blocks.5.attn 0 1 relative_position_index.37 @relative_position_index=(64,64)i64 #relative_position_index.37=(64,64)i64 Tensor.view Tensor.view_1164 2 1 5063 5064 x_windows.37 $input=5063 $shape=5064 #5063=(1,6,6,8,8,192)f32 #x_windows.37=(36,8,8,192)f32 Tensor.view Tensor.view_1165 2 1 x_windows.37 5066 x2.37 $input=x_windows.37 $shape=5066 #x_windows.37=(36,8,8,192)f32 #x2.37=(36,64,192)f32 aten::size pnnx_3614 2 1 x2.37 5071 5087 #x2.37=(36,64,192)f32 prim::NumToTensor pnnx_3615 1 1 5087 B_.37 aten::Int pnnx_3616 1 1 B_.37 5089 aten::Int pnnx_3617 1 1 B_.37 5090 aten::size pnnx_3618 2 1 x2.37 5072 5091 #x2.37=(36,64,192)f32 prim::NumToTensor pnnx_3619 1 1 5091 N.37 aten::Int pnnx_3620 1 1 N.37 5093 aten::Int pnnx_3621 1 1 N.37 5094 aten::Int pnnx_3622 1 1 N.37 5095 aten::Int pnnx_3623 1 1 N.37 5096 aten::Int pnnx_3624 1 1 N.37 5097 aten::Int pnnx_3625 1 1 N.37 5098 aten::size pnnx_3626 2 1 x2.37 5073 5099 #x2.37=(36,64,192)f32 prim::NumToTensor pnnx_3627 1 1 5099 C.81 aten::Int pnnx_3628 1 1 C.81 5101 
nn.Linear layers_dfe.2.residual_group.blocks.5.attn.qkv 1 1 x2.37 5102 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.37=(36,64,192)f32 #5102=(36,64,576)f32 aten::div pnnx_3629 3 1 C.81 5070 5069 5103 aten::Int pnnx_3630 1 1 5103 5104 prim::ListConstruct pnnx_3631 5 1 5090 5098 5074 5075 5104 5105 prim::Constant pnnx_3633 0 1 21119 value=2 prim::Constant pnnx_3634 0 1 21120 value=0 prim::Constant pnnx_3635 0 1 21121 value=3 prim::Constant pnnx_3636 0 1 21122 value=1 prim::ListConstruct pnnx_3637 5 1 21119 21120 21121 21122 5076 5107 Tensor.reshape Tensor.reshape_468 2 1 5102 5105 5106 $input=5102 $shape=5105 #5102=(36,64,576)f32 #5106=(36,64,3,6,32)f32 prim::Constant pnnx_3639 0 1 21123 value=0 prim::Constant pnnx_3640 0 1 21124 value=0 prim::Constant pnnx_3642 0 1 21125 value=0 prim::Constant pnnx_3643 0 1 21126 value=1 prim::Constant pnnx_3645 0 1 21127 value=0 prim::Constant pnnx_3646 0 1 21128 value=2 torch.permute torch.permute_2597 2 1 5106 5107 qkv0.37 $input=5106 $dims=5107 #5106=(36,64,3,6,32)f32 #qkv0.37=(3,36,6,64,32)f32 Tensor.select Tensor.select_701 3 1 qkv0.37 21123 21124 q.37 $input=qkv0.37 $dim=21123 $index=21124 #qkv0.37=(3,36,6,64,32)f32 #q.37=(36,6,64,32)f32 aten::mul pnnx_3648 2 1 q.37 5068 q0.37 #q.37=(36,6,64,32)f32 #q0.37=(36,6,64,32)f32 Tensor.select Tensor.select_702 3 1 qkv0.37 21125 21126 k.37 $input=qkv0.37 $dim=21125 $index=21126 #qkv0.37=(3,36,6,64,32)f32 #k.37=(36,6,64,32)f32 prim::Constant pnnx_3651 0 1 21129 value=-1 prim::ListConstruct pnnx_3652 1 1 21129 5115 Tensor.view Tensor.view_1166 2 1 relative_position_index.37 5115 5116 $input=relative_position_index.37 $shape=5115 #relative_position_index.37=(64,64)i64 #5116=(4096)i64 prim::ListConstruct pnnx_3654 1 1 5116 5117 #5116=(4096)i64 prim::Constant pnnx_3656 0 1 21130 value=64 prim::Constant pnnx_3657 0 1 21131 value=-1 prim::ListConstruct pnnx_3658 3 1 5079 21130 21131 5119 Tensor.index Tensor.index_343 2 1 relative_position_bias_table.37 5117 5118 $input=relative_position_bias_table.37 $expr=5117 #relative_position_bias_table.37=(225,6)f32 #5118=(4096,6)f32 prim::Constant pnnx_3660 0 1 21132 value=2 prim::Constant pnnx_3661 0 1 21133 value=0 prim::Constant pnnx_3662 0 1 21134 value=1 prim::ListConstruct pnnx_3663 3 1 21132 21133 21134 5121 Tensor.view Tensor.view_1167 2 1 5118 5119 relative_position_bias.37 $input=5118 $shape=5119 #5118=(4096,6)f32 #relative_position_bias.37=(64,64,6)f32 prim::Constant pnnx_3667 0 1 21136 value=0 torch.permute torch.permute_2598 2 1 relative_position_bias.37 5121 5122 $input=relative_position_bias.37 $dims=5121 #relative_position_bias.37=(64,64,6)f32 #5122=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_54 1 1 5122 relative_position_bias0.37 memory_format=torch.contiguous_format $input=5122 #5122=(6,64,64)f32 #relative_position_bias0.37=(6,64,64)f32 prim::Constant pnnx_3669 0 1 21137 value=1 torch.transpose torch.transpose_2999 3 1 k.37 5077 5078 5113 $input=k.37 $dim0=5077 $dim1=5078 #k.37=(36,6,64,32)f32 #5113=(36,6,32,64)f32 torch.matmul torch.matmul_2238 2 1 q0.37 5113 attn.75 $input=q0.37 $other=5113 #q0.37=(36,6,64,32)f32 #5113=(36,6,32,64)f32 #attn.75=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3251 2 1 relative_position_bias0.37 21136 5124 $input=relative_position_bias0.37 $dim=21136 #relative_position_bias0.37=(6,64,64)f32 #5124=(1,6,64,64)f32 aten::add pnnx_3670 3 1 attn.75 5124 21137 attn0.19 #attn.75=(36,6,64,64)f32 #5124=(1,6,64,64)f32 #attn0.19=(36,6,64,64)f32 prim::Constant pnnx_3671 0 1 21138 value=0 aten::size 
pnnx_3672 2 1 attn_mask.19 21138 5126 #attn_mask.19=(36,64,64)f32 prim::NumToTensor pnnx_3673 1 1 5126 other.19 aten::Int pnnx_3674 1 1 other.19 5128 prim::Constant pnnx_3675 0 1 21139 value=trunc aten::div pnnx_3676 3 1 B_.37 other.19 21139 5129 aten::Int pnnx_3677 1 1 5129 5130 prim::Constant pnnx_3678 0 1 21140 value=6 prim::ListConstruct pnnx_3679 5 1 5130 5128 21140 5097 5096 5131 prim::Constant pnnx_3681 0 1 21141 value=1 prim::Constant pnnx_3683 0 1 21142 value=0 prim::Constant pnnx_3685 0 1 21143 value=1 Tensor.view Tensor.view_1168 2 1 attn0.19 5131 5132 $input=attn0.19 $shape=5131 #attn0.19=(36,6,64,64)f32 #5132=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3252 2 1 attn_mask.19 21141 5133 $input=attn_mask.19 $dim=21141 #attn_mask.19=(36,64,64)f32 #5133=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3253 2 1 5133 21142 5134 $input=5133 $dim=21142 #5133=(36,1,64,64)f32 #5134=(1,36,1,64,64)f32 aten::add pnnx_3686 3 1 5132 5134 21143 attn1.19 #5132=(1,36,6,64,64)f32 #5134=(1,36,1,64,64)f32 #attn1.19=(1,36,6,64,64)f32 prim::Constant pnnx_3687 0 1 21144 value=-1 prim::Constant pnnx_3688 0 1 21145 value=6 prim::ListConstruct pnnx_3689 4 1 21144 21145 5095 5094 5136 Tensor.view Tensor.view_1169 2 1 attn1.19 5136 input.83 $input=attn1.19 $shape=5136 #attn1.19=(1,36,6,64,64)f32 #input.83=(36,6,64,64)f32 nn.Softmax layers_dfe.2.residual_group.blocks.5.attn.softmax 1 1 input.83 5138 dim=-1 #input.83=(36,6,64,64)f32 #5138=(36,6,64,64)f32 nn.Dropout layers_dfe.2.residual_group.blocks.5.attn.attn_drop 1 1 5138 5139 #5138=(36,6,64,64)f32 #5139=(36,6,64,64)f32 Tensor.select Tensor.select_703 3 1 qkv0.37 21127 21128 v.37 $input=qkv0.37 $dim=21127 $index=21128 #qkv0.37=(3,36,6,64,32)f32 #v.37=(36,6,64,32)f32 prim::Constant pnnx_3692 0 1 21146 value=1 prim::Constant pnnx_3693 0 1 21147 value=2 torch.matmul torch.matmul_2239 2 1 5139 v.37 5140 $input=5139 $other=v.37 #5139=(36,6,64,64)f32 #v.37=(36,6,64,32)f32 #5140=(36,6,64,32)f32 prim::ListConstruct pnnx_3695 3 1 5089 5093 5101 5142 torch.transpose torch.transpose_3000 3 1 5140 21146 21147 5141 $input=5140 $dim0=21146 $dim1=21147 #5140=(36,6,64,32)f32 #5141=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_469 2 1 5141 5142 input0.39 $input=5141 $shape=5142 #5141=(36,64,6,32)f32 #input0.39=(36,64,192)f32 nn.Linear layers_dfe.2.residual_group.blocks.5.attn.proj 1 1 input0.39 5144 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.39=(36,64,192)f32 #5144=(36,64,192)f32 nn.Dropout layers_dfe.2.residual_group.blocks.5.attn.proj_drop 1 1 5144 5145 #5144=(36,64,192)f32 #5145=(36,64,192)f32 prim::Constant pnnx_3697 0 1 21148 value=-1 prim::Constant pnnx_3698 0 1 21149 value=8 prim::Constant pnnx_3699 0 1 21150 value=8 prim::ListConstruct pnnx_3700 4 1 21148 21149 21150 5035 5146 prim::Constant pnnx_3702 0 1 21151 value=8 prim::Constant pnnx_3703 0 1 21152 value=trunc aten::div pnnx_3704 3 1 H.1 21151 21152 5148 aten::Int pnnx_3705 1 1 5148 5149 prim::Constant pnnx_3706 0 1 21153 value=8 prim::Constant pnnx_3707 0 1 21154 value=trunc aten::div pnnx_3708 3 1 W.1 21153 21154 5150 aten::Int pnnx_3709 1 1 5150 5151 prim::Constant pnnx_3710 0 1 21155 value=1 prim::Constant pnnx_3711 0 1 21156 value=8 prim::Constant pnnx_3712 0 1 21157 value=8 prim::Constant pnnx_3713 0 1 21158 value=-1 prim::ListConstruct pnnx_3714 6 1 21155 5149 5151 21156 21157 21158 5152 prim::Constant pnnx_3716 0 1 21159 value=0 prim::Constant pnnx_3717 0 1 21160 value=1 prim::Constant pnnx_3718 0 1 21161 value=3 prim::Constant pnnx_3719 0 1 21162 
value=2 prim::Constant pnnx_3720 0 1 21163 value=4 prim::Constant pnnx_3721 0 1 21164 value=5 prim::ListConstruct pnnx_3722 6 1 21159 21160 21161 21162 21163 21164 5154 Tensor.view Tensor.view_1170 2 1 5145 5146 windows.37 $input=5145 $shape=5146 #5145=(36,64,192)f32 #windows.37=(36,8,8,192)f32 Tensor.view Tensor.view_1171 2 1 windows.37 5152 x3.37 $input=windows.37 $shape=5152 #windows.37=(36,8,8,192)f32 #x3.37=(1,6,6,8,8,192)f32 prim::Constant pnnx_3726 0 1 21166 value=1 prim::Constant pnnx_3727 0 1 21167 value=-1 prim::ListConstruct pnnx_3728 4 1 21166 286 526 21167 5157 torch.permute torch.permute_2599 2 1 x3.37 5154 5155 $input=x3.37 $dims=5154 #x3.37=(1,6,6,8,8,192)f32 #5155=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_55 1 1 5155 5156 memory_format=torch.contiguous_format $input=5155 #5155=(1,6,8,6,8,192)f32 #5156=(1,6,8,6,8,192)f32 prim::Constant pnnx_3730 0 1 21168 value=4 prim::Constant pnnx_3731 0 1 21169 value=4 prim::ListConstruct pnnx_3732 2 1 21168 21169 5159 prim::Constant pnnx_3733 0 1 21170 value=1 prim::Constant pnnx_3734 0 1 21171 value=2 prim::ListConstruct pnnx_3735 2 1 21170 21171 5160 Tensor.view Tensor.view_1172 2 1 5156 5157 shifted_x.19 $input=5156 $shape=5157 #5156=(1,6,8,6,8,192)f32 #shifted_x.19=(1,48,48,192)f32 aten::mul pnnx_3737 2 1 H.1 W.1 5162 aten::Int pnnx_3738 1 1 5162 5163 prim::ListConstruct pnnx_3739 3 1 5030 5163 5034 5164 prim::Constant pnnx_3741 0 1 5166 value=None prim::Constant pnnx_3742 0 1 21172 value=1 torch.roll torch.roll_2437 3 1 shifted_x.19 5159 5160 x4.37 $input=shifted_x.19 $shifts=5159 $dims=5160 #shifted_x.19=(1,48,48,192)f32 #x4.37=(1,48,48,192)f32 Tensor.view Tensor.view_1173 2 1 x4.37 5164 x5.19 $input=x4.37 $shape=5164 #x4.37=(1,48,48,192)f32 #x5.19=(1,2304,192)f32 aten::add pnnx_3743 3 1 5009 x5.19 21172 input.85 #5009=(1,2304,192)f32 #x5.19=(1,2304,192)f32 #input.85=(1,2304,192)f32 nn.LayerNorm layers_dfe.2.residual_group.blocks.5.norm2 1 1 input.85 5168 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.85=(1,2304,192)f32 #5168=(1,2304,192)f32 nn.Linear layers_dfe.2.residual_group.blocks.5.mlp.fc1 1 1 5168 5173 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #5168=(1,2304,192)f32 #5173=(1,2304,384)f32 nn.GELU layers_dfe.2.residual_group.blocks.5.mlp.act 1 1 5173 5174 #5173=(1,2304,384)f32 #5174=(1,2304,384)f32 nn.Dropout layers_dfe.2.residual_group.blocks.5.mlp.drop 1 1 5174 5175 #5174=(1,2304,384)f32 #5175=(1,2304,384)f32 nn.Linear layers_dfe.2.residual_group.blocks.5.mlp.fc2 1 1 5175 5176 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #5175=(1,2304,384)f32 #5176=(1,2304,192)f32 nn.Dropout layers_dfe.2.residual_group.blocks.5.mlp.drop 1 1 5176 5177 #5176=(1,2304,192)f32 #5177=(1,2304,192)f32 prim::Constant pnnx_3744 0 1 5178 value=None prim::Constant pnnx_3745 0 1 21173 value=1 aten::add pnnx_3746 3 1 input.85 5177 21173 5179 #input.85=(1,2304,192)f32 #5177=(1,2304,192)f32 #5179=(1,2304,192)f32 prim::Constant pnnx_3747 0 1 5180 value=0 prim::Constant pnnx_3748 0 1 5181 value=1 prim::Constant pnnx_3749 0 1 5182 value=2 prim::Constant pnnx_3750 0 1 5183 value=192 aten::size pnnx_3751 2 1 5179 5180 5184 #5179=(1,2304,192)f32 prim::NumToTensor pnnx_3752 1 1 5184 B.45 aten::Int pnnx_3753 1 1 B.45 5186 prim::ListConstruct pnnx_3755 4 1 5186 5183 283 523 5188 torch.transpose torch.transpose_3001 3 1 5179 5181 5182 5187 $input=5179 $dim0=5181 $dim1=5182 #5179=(1,2304,192)f32 #5187=(1,192,2304)f32 Tensor.view 
Tensor.view_1174 2 1 5187 5188 input.87 $input=5187 $shape=5188 #5187=(1,192,2304)f32 #input.87=(1,192,48,48)f32 nn.Conv2d layers_dfe.2.conv 1 1 input.87 5190 bias=True dilation=(1,1) groups=1 in_channels=192 kernel_size=(3,3) out_channels=192 padding=(1,1) padding_mode=zeros stride=(1,1) @bias=(192)f32 @weight=(192,192,3,3)f32 #input.87=(1,192,48,48)f32 #5190=(1,192,48,48)f32 prim::Constant pnnx_3757 0 1 5191 value=-1 prim::Constant pnnx_3758 0 1 5192 value=2 prim::Constant pnnx_3759 0 1 5193 value=1 prim::Constant pnnx_3761 0 1 21174 value=2 torch.flatten torch.flatten_2186 3 1 5190 5192 5191 5194 $input=5190 $start_dim=5192 $end_dim=5191 #5190=(1,192,48,48)f32 #5194=(1,192,2304)f32 torch.transpose torch.transpose_3002 3 1 5194 5193 21174 5195 $input=5194 $dim0=5193 $dim1=21174 #5194=(1,192,2304)f32 #5195=(1,2304,192)f32 aten::add pnnx_3763 3 1 5195 4214 4215 5196 #5195=(1,2304,192)f32 #4214=(1,2304,192)f32 #5196=(1,2304,192)f32 prim::Constant pnnx_3764 0 1 5197 value=1 prim::Constant pnnx_3765 0 1 5214 value=trunc prim::Constant pnnx_3766 0 1 5215 value=8 prim::Constant pnnx_3767 0 1 5216 value=0 prim::Constant pnnx_3768 0 1 5217 value=2 prim::Constant pnnx_3769 0 1 5218 value=1 prim::Constant pnnx_3770 0 1 5219 value=3 prim::Constant pnnx_3771 0 1 5220 value=8 prim::Constant pnnx_3772 0 1 5221 value=4 prim::Constant pnnx_3773 0 1 5222 value=5 prim::Constant pnnx_3774 0 1 5223 value=-1 prim::Constant pnnx_3775 0 1 5224 value=64 aten::size pnnx_3776 2 1 5196 5216 5230 #5196=(1,2304,192)f32 prim::NumToTensor pnnx_3777 1 1 5230 B.47 aten::Int pnnx_3778 1 1 B.47 5232 aten::Int pnnx_3779 1 1 B.47 5233 aten::size pnnx_3780 2 1 5196 5217 5234 #5196=(1,2304,192)f32 prim::NumToTensor pnnx_3781 1 1 5234 C.83 aten::Int pnnx_3782 1 1 C.83 5236 aten::Int pnnx_3783 1 1 C.83 5237 aten::Int pnnx_3784 1 1 C.83 5238 aten::Int pnnx_3785 1 1 C.83 5239 nn.LayerNorm layers_dfe.3.residual_group.blocks.0.norm1 1 1 5196 5240 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #5196=(1,2304,192)f32 #5240=(1,2304,192)f32 prim::ListConstruct pnnx_3786 4 1 5233 280 520 5239 5241 prim::Constant pnnx_3788 0 1 21175 value=0 Tensor.view Tensor.view_1175 2 1 5240 5241 x.39 $input=5240 $shape=5241 #5240=(1,2304,192)f32 #x.39=(1,48,48,192)f32 aten::size pnnx_3789 2 1 x.39 21175 5243 #x.39=(1,48,48,192)f32 prim::NumToTensor pnnx_3790 1 1 5243 B0.39 aten::Int pnnx_3791 1 1 B0.39 5245 aten::size pnnx_3792 2 1 x.39 5218 5246 #x.39=(1,48,48,192)f32 prim::NumToTensor pnnx_3793 1 1 5246 5247 prim::Constant pnnx_3794 0 1 21176 value=2 aten::size pnnx_3795 2 1 x.39 21176 5248 #x.39=(1,48,48,192)f32 prim::NumToTensor pnnx_3796 1 1 5248 5249 aten::size pnnx_3797 2 1 x.39 5219 5250 #x.39=(1,48,48,192)f32 prim::NumToTensor pnnx_3798 1 1 5250 C0.39 aten::Int pnnx_3799 1 1 C0.39 5252 aten::Int pnnx_3800 1 1 C0.39 5253 aten::div pnnx_3801 3 1 5247 5215 5214 5254 aten::Int pnnx_3802 1 1 5254 5255 prim::Constant pnnx_3803 0 1 21177 value=8 prim::Constant pnnx_3804 0 1 21178 value=trunc aten::div pnnx_3805 3 1 5249 21177 21178 5256 aten::Int pnnx_3806 1 1 5256 5257 prim::Constant pnnx_3807 0 1 21179 value=8 prim::ListConstruct pnnx_3808 6 1 5245 5255 5220 5257 21179 5253 5258 prim::Constant pnnx_3810 0 1 21180 value=0 prim::Constant pnnx_3811 0 1 21181 value=1 prim::Constant pnnx_3812 0 1 21182 value=3 prim::Constant pnnx_3813 0 1 21183 value=2 prim::ListConstruct pnnx_3814 6 1 21180 21181 21182 21183 5221 5222 5260 Tensor.view Tensor.view_1176 2 1 x.39 5258 x0.39 $input=x.39 $shape=5258 
#x.39=(1,48,48,192)f32 #x0.39=(1,6,8,6,8,192)f32 prim::Constant pnnx_3818 0 1 21185 value=8 prim::Constant pnnx_3819 0 1 21186 value=8 prim::ListConstruct pnnx_3820 4 1 5223 21185 21186 5252 5263 torch.permute torch.permute_2600 2 1 x0.39 5260 5261 $input=x0.39 $dims=5260 #x0.39=(1,6,8,6,8,192)f32 #5261=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_56 1 1 5261 5262 memory_format=torch.contiguous_format $input=5261 #5261=(1,6,6,8,8,192)f32 #5262=(1,6,6,8,8,192)f32 prim::Constant pnnx_3822 0 1 21187 value=-1 prim::ListConstruct pnnx_3823 3 1 21187 5224 5238 5265 prim::Constant pnnx_3825 0 1 5267 value=1.767767e-01 prim::Constant pnnx_3826 0 1 5268 value=trunc prim::Constant pnnx_3827 0 1 5269 value=6 prim::Constant pnnx_3828 0 1 5270 value=0 prim::Constant pnnx_3829 0 1 5271 value=1 prim::Constant pnnx_3830 0 1 5272 value=2 prim::Constant pnnx_3831 0 1 5273 value=3 prim::Constant pnnx_3832 0 1 5274 value=6 prim::Constant pnnx_3833 0 1 5275 value=4 prim::Constant pnnx_3834 0 1 5276 value=-2 prim::Constant pnnx_3835 0 1 5277 value=-1 prim::Constant pnnx_3836 0 1 5278 value=64 pnnx.Attribute layers_dfe.3.residual_group.blocks.0.attn 0 1 relative_position_bias_table.39 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.39=(225,6)f32 pnnx.Attribute layers_dfe.3.residual_group.blocks.0.attn 0 1 relative_position_index.39 @relative_position_index=(64,64)i64 #relative_position_index.39=(64,64)i64 Tensor.view Tensor.view_1177 2 1 5262 5263 x_windows.39 $input=5262 $shape=5263 #5262=(1,6,6,8,8,192)f32 #x_windows.39=(36,8,8,192)f32 Tensor.view Tensor.view_1178 2 1 x_windows.39 5265 x1.39 $input=x_windows.39 $shape=5265 #x_windows.39=(36,8,8,192)f32 #x1.39=(36,64,192)f32 aten::size pnnx_3837 2 1 x1.39 5270 5286 #x1.39=(36,64,192)f32 prim::NumToTensor pnnx_3838 1 1 5286 B_.39 aten::Int pnnx_3839 1 1 B_.39 5288 aten::Int pnnx_3840 1 1 B_.39 5289 aten::size pnnx_3841 2 1 x1.39 5271 5290 #x1.39=(36,64,192)f32 prim::NumToTensor pnnx_3842 1 1 5290 N.39 aten::Int pnnx_3843 1 1 N.39 5292 aten::Int pnnx_3844 1 1 N.39 5293 aten::size pnnx_3845 2 1 x1.39 5272 5294 #x1.39=(36,64,192)f32 prim::NumToTensor pnnx_3846 1 1 5294 C.85 aten::Int pnnx_3847 1 1 C.85 5296 nn.Linear layers_dfe.3.residual_group.blocks.0.attn.qkv 1 1 x1.39 5297 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.39=(36,64,192)f32 #5297=(36,64,576)f32 aten::div pnnx_3848 3 1 C.85 5269 5268 5298 aten::Int pnnx_3849 1 1 5298 5299 prim::ListConstruct pnnx_3850 5 1 5289 5293 5273 5274 5299 5300 prim::Constant pnnx_3852 0 1 21188 value=2 prim::Constant pnnx_3853 0 1 21189 value=0 prim::Constant pnnx_3854 0 1 21190 value=3 prim::Constant pnnx_3855 0 1 21191 value=1 prim::ListConstruct pnnx_3856 5 1 21188 21189 21190 21191 5275 5302 Tensor.reshape Tensor.reshape_470 2 1 5297 5300 5301 $input=5297 $shape=5300 #5297=(36,64,576)f32 #5301=(36,64,3,6,32)f32 prim::Constant pnnx_3858 0 1 21192 value=0 prim::Constant pnnx_3859 0 1 21193 value=0 prim::Constant pnnx_3861 0 1 21194 value=0 prim::Constant pnnx_3862 0 1 21195 value=1 prim::Constant pnnx_3864 0 1 21196 value=0 prim::Constant pnnx_3865 0 1 21197 value=2 torch.permute torch.permute_2601 2 1 5301 5302 qkv0.39 $input=5301 $dims=5302 #5301=(36,64,3,6,32)f32 #qkv0.39=(3,36,6,64,32)f32 Tensor.select Tensor.select_704 3 1 qkv0.39 21192 21193 q.39 $input=qkv0.39 $dim=21192 $index=21193 #qkv0.39=(3,36,6,64,32)f32 #q.39=(36,6,64,32)f32 aten::mul pnnx_3867 2 1 q.39 5267 q0.39 #q.39=(36,6,64,32)f32 #q0.39=(36,6,64,32)f32 Tensor.select Tensor.select_705 3 
1 qkv0.39 21194 21195 k.39 $input=qkv0.39 $dim=21194 $index=21195 #qkv0.39=(3,36,6,64,32)f32 #k.39=(36,6,64,32)f32 prim::Constant pnnx_3870 0 1 21198 value=-1 prim::ListConstruct pnnx_3871 1 1 21198 5310 Tensor.view Tensor.view_1179 2 1 relative_position_index.39 5310 5311 $input=relative_position_index.39 $shape=5310 #relative_position_index.39=(64,64)i64 #5311=(4096)i64 prim::ListConstruct pnnx_3873 1 1 5311 5312 #5311=(4096)i64 prim::Constant pnnx_3875 0 1 21199 value=64 prim::Constant pnnx_3876 0 1 21200 value=-1 prim::ListConstruct pnnx_3877 3 1 5278 21199 21200 5314 Tensor.index Tensor.index_344 2 1 relative_position_bias_table.39 5312 5313 $input=relative_position_bias_table.39 $expr=5312 #relative_position_bias_table.39=(225,6)f32 #5313=(4096,6)f32 prim::Constant pnnx_3879 0 1 21201 value=2 prim::Constant pnnx_3880 0 1 21202 value=0 prim::Constant pnnx_3881 0 1 21203 value=1 prim::ListConstruct pnnx_3882 3 1 21201 21202 21203 5316 Tensor.view Tensor.view_1180 2 1 5313 5314 relative_position_bias.39 $input=5313 $shape=5314 #5313=(4096,6)f32 #relative_position_bias.39=(64,64,6)f32 prim::Constant pnnx_3886 0 1 21205 value=0 torch.permute torch.permute_2602 2 1 relative_position_bias.39 5316 5317 $input=relative_position_bias.39 $dims=5316 #relative_position_bias.39=(64,64,6)f32 #5317=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_57 1 1 5317 relative_position_bias0.39 memory_format=torch.contiguous_format $input=5317 #5317=(6,64,64)f32 #relative_position_bias0.39=(6,64,64)f32 prim::Constant pnnx_3888 0 1 21206 value=1 torch.transpose torch.transpose_3003 3 1 k.39 5276 5277 5308 $input=k.39 $dim0=5276 $dim1=5277 #k.39=(36,6,64,32)f32 #5308=(36,6,32,64)f32 torch.matmul torch.matmul_2240 2 1 q0.39 5308 attn.79 $input=q0.39 $other=5308 #q0.39=(36,6,64,32)f32 #5308=(36,6,32,64)f32 #attn.79=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3254 2 1 relative_position_bias0.39 21205 5319 $input=relative_position_bias0.39 $dim=21205 #relative_position_bias0.39=(6,64,64)f32 #5319=(1,6,64,64)f32 aten::add pnnx_3889 3 1 attn.79 5319 21206 input.89 #attn.79=(36,6,64,64)f32 #5319=(1,6,64,64)f32 #input.89=(36,6,64,64)f32 nn.Softmax layers_dfe.3.residual_group.blocks.0.attn.softmax 1 1 input.89 5321 dim=-1 #input.89=(36,6,64,64)f32 #5321=(36,6,64,64)f32 nn.Dropout layers_dfe.3.residual_group.blocks.0.attn.attn_drop 1 1 5321 5322 #5321=(36,6,64,64)f32 #5322=(36,6,64,64)f32 Tensor.select Tensor.select_706 3 1 qkv0.39 21196 21197 v.39 $input=qkv0.39 $dim=21196 $index=21197 #qkv0.39=(3,36,6,64,32)f32 #v.39=(36,6,64,32)f32 prim::Constant pnnx_3891 0 1 21207 value=1 prim::Constant pnnx_3892 0 1 21208 value=2 torch.matmul torch.matmul_2241 2 1 5322 v.39 5323 $input=5322 $other=v.39 #5322=(36,6,64,64)f32 #v.39=(36,6,64,32)f32 #5323=(36,6,64,32)f32 prim::ListConstruct pnnx_3894 3 1 5288 5292 5296 5325 torch.transpose torch.transpose_3004 3 1 5323 21207 21208 5324 $input=5323 $dim0=21207 $dim1=21208 #5323=(36,6,64,32)f32 #5324=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_471 2 1 5324 5325 input0.41 $input=5324 $shape=5325 #5324=(36,64,6,32)f32 #input0.41=(36,64,192)f32 nn.Linear layers_dfe.3.residual_group.blocks.0.attn.proj 1 1 input0.41 5327 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.41=(36,64,192)f32 #5327=(36,64,192)f32 nn.Dropout layers_dfe.3.residual_group.blocks.0.attn.proj_drop 1 1 5327 5328 #5327=(36,64,192)f32 #5328=(36,64,192)f32 prim::Constant pnnx_3896 0 1 21209 value=-1 prim::Constant pnnx_3897 0 1 21210 value=8 prim::Constant pnnx_3898 0 1 21211 
value=8 prim::ListConstruct pnnx_3899 4 1 21209 21210 21211 5237 5329 prim::Constant pnnx_3901 0 1 21212 value=8 prim::Constant pnnx_3902 0 1 21213 value=trunc aten::div pnnx_3903 3 1 H.1 21212 21213 5331 aten::Int pnnx_3904 1 1 5331 5332 prim::Constant pnnx_3905 0 1 21214 value=8 prim::Constant pnnx_3906 0 1 21215 value=trunc aten::div pnnx_3907 3 1 W.1 21214 21215 5333 aten::Int pnnx_3908 1 1 5333 5334 prim::Constant pnnx_3909 0 1 21216 value=1 prim::Constant pnnx_3910 0 1 21217 value=8 prim::Constant pnnx_3911 0 1 21218 value=8 prim::Constant pnnx_3912 0 1 21219 value=-1 prim::ListConstruct pnnx_3913 6 1 21216 5332 5334 21217 21218 21219 5335 prim::Constant pnnx_3915 0 1 21220 value=0 prim::Constant pnnx_3916 0 1 21221 value=1 prim::Constant pnnx_3917 0 1 21222 value=3 prim::Constant pnnx_3918 0 1 21223 value=2 prim::Constant pnnx_3919 0 1 21224 value=4 prim::Constant pnnx_3920 0 1 21225 value=5 prim::ListConstruct pnnx_3921 6 1 21220 21221 21222 21223 21224 21225 5337 Tensor.view Tensor.view_1181 2 1 5328 5329 windows.39 $input=5328 $shape=5329 #5328=(36,64,192)f32 #windows.39=(36,8,8,192)f32 Tensor.view Tensor.view_1182 2 1 windows.39 5335 x2.39 $input=windows.39 $shape=5335 #windows.39=(36,8,8,192)f32 #x2.39=(1,6,6,8,8,192)f32 prim::Constant pnnx_3925 0 1 21227 value=1 prim::Constant pnnx_3926 0 1 21228 value=-1 prim::ListConstruct pnnx_3927 4 1 21227 277 517 21228 5340 torch.permute torch.permute_2603 2 1 x2.39 5337 5338 $input=x2.39 $dims=5337 #x2.39=(1,6,6,8,8,192)f32 #5338=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_58 1 1 5338 5339 memory_format=torch.contiguous_format $input=5338 #5338=(1,6,8,6,8,192)f32 #5339=(1,6,8,6,8,192)f32 aten::mul pnnx_3929 2 1 H.1 W.1 5342 aten::Int pnnx_3930 1 1 5342 5343 prim::ListConstruct pnnx_3931 3 1 5232 5343 5236 5344 prim::Constant pnnx_3933 0 1 5346 value=None prim::Constant pnnx_3934 0 1 21229 value=1 Tensor.view Tensor.view_1183 2 1 5339 5340 x3.39 $input=5339 $shape=5340 #5339=(1,6,8,6,8,192)f32 #x3.39=(1,48,48,192)f32 Tensor.view Tensor.view_1184 2 1 x3.39 5344 x4.39 $input=x3.39 $shape=5344 #x3.39=(1,48,48,192)f32 #x4.39=(1,2304,192)f32 aten::add pnnx_3935 3 1 5196 x4.39 21229 input.91 #5196=(1,2304,192)f32 #x4.39=(1,2304,192)f32 #input.91=(1,2304,192)f32 nn.LayerNorm layers_dfe.3.residual_group.blocks.0.norm2 1 1 input.91 5348 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.91=(1,2304,192)f32 #5348=(1,2304,192)f32 nn.Linear layers_dfe.3.residual_group.blocks.0.mlp.fc1 1 1 5348 5353 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #5348=(1,2304,192)f32 #5353=(1,2304,384)f32 nn.GELU layers_dfe.3.residual_group.blocks.0.mlp.act 1 1 5353 5354 #5353=(1,2304,384)f32 #5354=(1,2304,384)f32 nn.Dropout layers_dfe.3.residual_group.blocks.0.mlp.drop 1 1 5354 5355 #5354=(1,2304,384)f32 #5355=(1,2304,384)f32 nn.Linear layers_dfe.3.residual_group.blocks.0.mlp.fc2 1 1 5355 5356 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #5355=(1,2304,384)f32 #5356=(1,2304,192)f32 nn.Dropout layers_dfe.3.residual_group.blocks.0.mlp.drop 1 1 5356 5357 #5356=(1,2304,192)f32 #5357=(1,2304,192)f32 prim::Constant pnnx_3936 0 1 5358 value=None prim::Constant pnnx_3937 0 1 21230 value=1 aten::add pnnx_3938 3 1 input.91 5357 21230 5359 #input.91=(1,2304,192)f32 #5357=(1,2304,192)f32 #5359=(1,2304,192)f32 prim::Constant pnnx_3939 0 1 5360 value=trunc prim::Constant pnnx_3940 0 1 5361 value=8 prim::Constant pnnx_3941 0 1 5362 value=0 prim::Constant 
pnnx_3942 0 1 5363 value=2 prim::Constant pnnx_3943 0 1 5364 value=-4 prim::Constant pnnx_3944 0 1 5365 value=1 prim::Constant pnnx_3945 0 1 5366 value=3 prim::Constant pnnx_3946 0 1 5367 value=8 prim::Constant pnnx_3947 0 1 5368 value=4 prim::Constant pnnx_3948 0 1 5369 value=5 prim::Constant pnnx_3949 0 1 5370 value=-1 prim::Constant pnnx_3950 0 1 5371 value=64 pnnx.Attribute layers_dfe.3.residual_group.blocks.1 0 1 attn_mask.21 @attn_mask=(36,64,64)f32 #attn_mask.21=(36,64,64)f32 aten::size pnnx_3951 2 1 5359 5362 5378 #5359=(1,2304,192)f32 prim::NumToTensor pnnx_3952 1 1 5378 B.49 aten::Int pnnx_3953 1 1 B.49 5380 aten::Int pnnx_3954 1 1 B.49 5381 aten::size pnnx_3955 2 1 5359 5363 5382 #5359=(1,2304,192)f32 prim::NumToTensor pnnx_3956 1 1 5382 C.87 aten::Int pnnx_3957 1 1 C.87 5384 aten::Int pnnx_3958 1 1 C.87 5385 aten::Int pnnx_3959 1 1 C.87 5386 aten::Int pnnx_3960 1 1 C.87 5387 nn.LayerNorm layers_dfe.3.residual_group.blocks.1.norm1 1 1 5359 5388 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #5359=(1,2304,192)f32 #5388=(1,2304,192)f32 prim::ListConstruct pnnx_3961 4 1 5381 274 514 5387 5389 prim::Constant pnnx_3963 0 1 21231 value=-4 prim::ListConstruct pnnx_3964 2 1 5364 21231 5391 prim::Constant pnnx_3965 0 1 21232 value=2 prim::ListConstruct pnnx_3966 2 1 5365 21232 5392 Tensor.view Tensor.view_1185 2 1 5388 5389 x.41 $input=5388 $shape=5389 #5388=(1,2304,192)f32 #x.41=(1,48,48,192)f32 prim::Constant pnnx_3968 0 1 21233 value=0 torch.roll torch.roll_2438 3 1 x.41 5391 5392 x0.41 $input=x.41 $shifts=5391 $dims=5392 #x.41=(1,48,48,192)f32 #x0.41=(1,48,48,192)f32 aten::size pnnx_3969 2 1 x0.41 21233 5394 #x0.41=(1,48,48,192)f32 prim::NumToTensor pnnx_3970 1 1 5394 B0.41 aten::Int pnnx_3971 1 1 B0.41 5396 prim::Constant pnnx_3972 0 1 21234 value=1 aten::size pnnx_3973 2 1 x0.41 21234 5397 #x0.41=(1,48,48,192)f32 prim::NumToTensor pnnx_3974 1 1 5397 5398 prim::Constant pnnx_3975 0 1 21235 value=2 aten::size pnnx_3976 2 1 x0.41 21235 5399 #x0.41=(1,48,48,192)f32 prim::NumToTensor pnnx_3977 1 1 5399 5400 aten::size pnnx_3978 2 1 x0.41 5366 5401 #x0.41=(1,48,48,192)f32 prim::NumToTensor pnnx_3979 1 1 5401 C0.41 aten::Int pnnx_3980 1 1 C0.41 5403 aten::Int pnnx_3981 1 1 C0.41 5404 aten::div pnnx_3982 3 1 5398 5361 5360 5405 aten::Int pnnx_3983 1 1 5405 5406 prim::Constant pnnx_3984 0 1 21236 value=8 prim::Constant pnnx_3985 0 1 21237 value=trunc aten::div pnnx_3986 3 1 5400 21236 21237 5407 aten::Int pnnx_3987 1 1 5407 5408 prim::Constant pnnx_3988 0 1 21238 value=8 prim::ListConstruct pnnx_3989 6 1 5396 5406 5367 5408 21238 5404 5409 prim::Constant pnnx_3991 0 1 21239 value=0 prim::Constant pnnx_3992 0 1 21240 value=1 prim::Constant pnnx_3993 0 1 21241 value=3 prim::Constant pnnx_3994 0 1 21242 value=2 prim::ListConstruct pnnx_3995 6 1 21239 21240 21241 21242 5368 5369 5411 Tensor.view Tensor.view_1186 2 1 x0.41 5409 x1.41 $input=x0.41 $shape=5409 #x0.41=(1,48,48,192)f32 #x1.41=(1,6,8,6,8,192)f32 prim::Constant pnnx_3999 0 1 21244 value=8 prim::Constant pnnx_4000 0 1 21245 value=8 prim::ListConstruct pnnx_4001 4 1 5370 21244 21245 5403 5414 torch.permute torch.permute_2604 2 1 x1.41 5411 5412 $input=x1.41 $dims=5411 #x1.41=(1,6,8,6,8,192)f32 #5412=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_59 1 1 5412 5413 memory_format=torch.contiguous_format $input=5412 #5412=(1,6,6,8,8,192)f32 #5413=(1,6,6,8,8,192)f32 prim::Constant pnnx_4003 0 1 21246 value=-1 prim::ListConstruct pnnx_4004 3 1 21246 5371 5386 5416 prim::Constant 
pnnx_4006 0 1 5418 value=1.767767e-01 prim::Constant pnnx_4007 0 1 5419 value=trunc prim::Constant pnnx_4008 0 1 5420 value=6 prim::Constant pnnx_4009 0 1 5421 value=0 prim::Constant pnnx_4010 0 1 5422 value=1 prim::Constant pnnx_4011 0 1 5423 value=2 prim::Constant pnnx_4012 0 1 5424 value=3 prim::Constant pnnx_4013 0 1 5425 value=6 prim::Constant pnnx_4014 0 1 5426 value=4 prim::Constant pnnx_4015 0 1 5427 value=-2 prim::Constant pnnx_4016 0 1 5428 value=-1 prim::Constant pnnx_4017 0 1 5429 value=64 pnnx.Attribute layers_dfe.3.residual_group.blocks.1.attn 0 1 relative_position_bias_table.41 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.41=(225,6)f32 pnnx.Attribute layers_dfe.3.residual_group.blocks.1.attn 0 1 relative_position_index.41 @relative_position_index=(64,64)i64 #relative_position_index.41=(64,64)i64 Tensor.view Tensor.view_1187 2 1 5413 5414 x_windows.41 $input=5413 $shape=5414 #5413=(1,6,6,8,8,192)f32 #x_windows.41=(36,8,8,192)f32 Tensor.view Tensor.view_1188 2 1 x_windows.41 5416 x2.41 $input=x_windows.41 $shape=5416 #x_windows.41=(36,8,8,192)f32 #x2.41=(36,64,192)f32 aten::size pnnx_4018 2 1 x2.41 5421 5437 #x2.41=(36,64,192)f32 prim::NumToTensor pnnx_4019 1 1 5437 B_.41 aten::Int pnnx_4020 1 1 B_.41 5439 aten::Int pnnx_4021 1 1 B_.41 5440 aten::size pnnx_4022 2 1 x2.41 5422 5441 #x2.41=(36,64,192)f32 prim::NumToTensor pnnx_4023 1 1 5441 N.41 aten::Int pnnx_4024 1 1 N.41 5443 aten::Int pnnx_4025 1 1 N.41 5444 aten::Int pnnx_4026 1 1 N.41 5445 aten::Int pnnx_4027 1 1 N.41 5446 aten::Int pnnx_4028 1 1 N.41 5447 aten::Int pnnx_4029 1 1 N.41 5448 aten::size pnnx_4030 2 1 x2.41 5423 5449 #x2.41=(36,64,192)f32 prim::NumToTensor pnnx_4031 1 1 5449 C.89 aten::Int pnnx_4032 1 1 C.89 5451 nn.Linear layers_dfe.3.residual_group.blocks.1.attn.qkv 1 1 x2.41 5452 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.41=(36,64,192)f32 #5452=(36,64,576)f32 aten::div pnnx_4033 3 1 C.89 5420 5419 5453 aten::Int pnnx_4034 1 1 5453 5454 prim::ListConstruct pnnx_4035 5 1 5440 5448 5424 5425 5454 5455 prim::Constant pnnx_4037 0 1 21247 value=2 prim::Constant pnnx_4038 0 1 21248 value=0 prim::Constant pnnx_4039 0 1 21249 value=3 prim::Constant pnnx_4040 0 1 21250 value=1 prim::ListConstruct pnnx_4041 5 1 21247 21248 21249 21250 5426 5457 Tensor.reshape Tensor.reshape_472 2 1 5452 5455 5456 $input=5452 $shape=5455 #5452=(36,64,576)f32 #5456=(36,64,3,6,32)f32 prim::Constant pnnx_4043 0 1 21251 value=0 prim::Constant pnnx_4044 0 1 21252 value=0 prim::Constant pnnx_4046 0 1 21253 value=0 prim::Constant pnnx_4047 0 1 21254 value=1 prim::Constant pnnx_4049 0 1 21255 value=0 prim::Constant pnnx_4050 0 1 21256 value=2 torch.permute torch.permute_2605 2 1 5456 5457 qkv0.41 $input=5456 $dims=5457 #5456=(36,64,3,6,32)f32 #qkv0.41=(3,36,6,64,32)f32 Tensor.select Tensor.select_707 3 1 qkv0.41 21251 21252 q.41 $input=qkv0.41 $dim=21251 $index=21252 #qkv0.41=(3,36,6,64,32)f32 #q.41=(36,6,64,32)f32 aten::mul pnnx_4052 2 1 q.41 5418 q0.41 #q.41=(36,6,64,32)f32 #q0.41=(36,6,64,32)f32 Tensor.select Tensor.select_708 3 1 qkv0.41 21253 21254 k.41 $input=qkv0.41 $dim=21253 $index=21254 #qkv0.41=(3,36,6,64,32)f32 #k.41=(36,6,64,32)f32 prim::Constant pnnx_4055 0 1 21257 value=-1 prim::ListConstruct pnnx_4056 1 1 21257 5465 Tensor.view Tensor.view_1189 2 1 relative_position_index.41 5465 5466 $input=relative_position_index.41 $shape=5465 #relative_position_index.41=(64,64)i64 #5466=(4096)i64 prim::ListConstruct pnnx_4058 1 1 5466 5467 #5466=(4096)i64 prim::Constant 
pnnx_4060 0 1 21258 value=64 prim::Constant pnnx_4061 0 1 21259 value=-1 prim::ListConstruct pnnx_4062 3 1 5429 21258 21259 5469 Tensor.index Tensor.index_345 2 1 relative_position_bias_table.41 5467 5468 $input=relative_position_bias_table.41 $expr=5467 #relative_position_bias_table.41=(225,6)f32 #5468=(4096,6)f32 prim::Constant pnnx_4064 0 1 21260 value=2 prim::Constant pnnx_4065 0 1 21261 value=0 prim::Constant pnnx_4066 0 1 21262 value=1 prim::ListConstruct pnnx_4067 3 1 21260 21261 21262 5471 Tensor.view Tensor.view_1190 2 1 5468 5469 relative_position_bias.41 $input=5468 $shape=5469 #5468=(4096,6)f32 #relative_position_bias.41=(64,64,6)f32 prim::Constant pnnx_4071 0 1 21264 value=0 torch.permute torch.permute_2606 2 1 relative_position_bias.41 5471 5472 $input=relative_position_bias.41 $dims=5471 #relative_position_bias.41=(64,64,6)f32 #5472=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_60 1 1 5472 relative_position_bias0.41 memory_format=torch.contiguous_format $input=5472 #5472=(6,64,64)f32 #relative_position_bias0.41=(6,64,64)f32 prim::Constant pnnx_4073 0 1 21265 value=1 torch.transpose torch.transpose_3005 3 1 k.41 5427 5428 5463 $input=k.41 $dim0=5427 $dim1=5428 #k.41=(36,6,64,32)f32 #5463=(36,6,32,64)f32 torch.matmul torch.matmul_2242 2 1 q0.41 5463 attn.83 $input=q0.41 $other=5463 #q0.41=(36,6,64,32)f32 #5463=(36,6,32,64)f32 #attn.83=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3255 2 1 relative_position_bias0.41 21264 5474 $input=relative_position_bias0.41 $dim=21264 #relative_position_bias0.41=(6,64,64)f32 #5474=(1,6,64,64)f32 aten::add pnnx_4074 3 1 attn.83 5474 21265 attn0.21 #attn.83=(36,6,64,64)f32 #5474=(1,6,64,64)f32 #attn0.21=(36,6,64,64)f32 prim::Constant pnnx_4075 0 1 21266 value=0 aten::size pnnx_4076 2 1 attn_mask.21 21266 5476 #attn_mask.21=(36,64,64)f32 prim::NumToTensor pnnx_4077 1 1 5476 other.21 aten::Int pnnx_4078 1 1 other.21 5478 prim::Constant pnnx_4079 0 1 21267 value=trunc aten::div pnnx_4080 3 1 B_.41 other.21 21267 5479 aten::Int pnnx_4081 1 1 5479 5480 prim::Constant pnnx_4082 0 1 21268 value=6 prim::ListConstruct pnnx_4083 5 1 5480 5478 21268 5447 5446 5481 prim::Constant pnnx_4085 0 1 21269 value=1 prim::Constant pnnx_4087 0 1 21270 value=0 prim::Constant pnnx_4089 0 1 21271 value=1 Tensor.view Tensor.view_1191 2 1 attn0.21 5481 5482 $input=attn0.21 $shape=5481 #attn0.21=(36,6,64,64)f32 #5482=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3256 2 1 attn_mask.21 21269 5483 $input=attn_mask.21 $dim=21269 #attn_mask.21=(36,64,64)f32 #5483=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3257 2 1 5483 21270 5484 $input=5483 $dim=21270 #5483=(36,1,64,64)f32 #5484=(1,36,1,64,64)f32 aten::add pnnx_4090 3 1 5482 5484 21271 attn1.21 #5482=(1,36,6,64,64)f32 #5484=(1,36,1,64,64)f32 #attn1.21=(1,36,6,64,64)f32 prim::Constant pnnx_4091 0 1 21272 value=-1 prim::Constant pnnx_4092 0 1 21273 value=6 prim::ListConstruct pnnx_4093 4 1 21272 21273 5445 5444 5486 Tensor.view Tensor.view_1192 2 1 attn1.21 5486 input.93 $input=attn1.21 $shape=5486 #attn1.21=(1,36,6,64,64)f32 #input.93=(36,6,64,64)f32 nn.Softmax layers_dfe.3.residual_group.blocks.1.attn.softmax 1 1 input.93 5488 dim=-1 #input.93=(36,6,64,64)f32 #5488=(36,6,64,64)f32 nn.Dropout layers_dfe.3.residual_group.blocks.1.attn.attn_drop 1 1 5488 5489 #5488=(36,6,64,64)f32 #5489=(36,6,64,64)f32 Tensor.select Tensor.select_709 3 1 qkv0.41 21255 21256 v.41 $input=qkv0.41 $dim=21255 $index=21256 #qkv0.41=(3,36,6,64,32)f32 #v.41=(36,6,64,32)f32 prim::Constant pnnx_4096 0 1 21274 value=1 prim::Constant 
pnnx_4097 0 1 21275 value=2 torch.matmul torch.matmul_2243 2 1 5489 v.41 5490 $input=5489 $other=v.41 #5489=(36,6,64,64)f32 #v.41=(36,6,64,32)f32 #5490=(36,6,64,32)f32 prim::ListConstruct pnnx_4099 3 1 5439 5443 5451 5492 torch.transpose torch.transpose_3006 3 1 5490 21274 21275 5491 $input=5490 $dim0=21274 $dim1=21275 #5490=(36,6,64,32)f32 #5491=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_473 2 1 5491 5492 input0.43 $input=5491 $shape=5492 #5491=(36,64,6,32)f32 #input0.43=(36,64,192)f32 nn.Linear layers_dfe.3.residual_group.blocks.1.attn.proj 1 1 input0.43 5494 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.43=(36,64,192)f32 #5494=(36,64,192)f32 nn.Dropout layers_dfe.3.residual_group.blocks.1.attn.proj_drop 1 1 5494 5495 #5494=(36,64,192)f32 #5495=(36,64,192)f32 prim::Constant pnnx_4101 0 1 21276 value=-1 prim::Constant pnnx_4102 0 1 21277 value=8 prim::Constant pnnx_4103 0 1 21278 value=8 prim::ListConstruct pnnx_4104 4 1 21276 21277 21278 5385 5496 prim::Constant pnnx_4106 0 1 21279 value=8 prim::Constant pnnx_4107 0 1 21280 value=trunc aten::div pnnx_4108 3 1 H.1 21279 21280 5498 aten::Int pnnx_4109 1 1 5498 5499 prim::Constant pnnx_4110 0 1 21281 value=8 prim::Constant pnnx_4111 0 1 21282 value=trunc aten::div pnnx_4112 3 1 W.1 21281 21282 5500 aten::Int pnnx_4113 1 1 5500 5501 prim::Constant pnnx_4114 0 1 21283 value=1 prim::Constant pnnx_4115 0 1 21284 value=8 prim::Constant pnnx_4116 0 1 21285 value=8 prim::Constant pnnx_4117 0 1 21286 value=-1 prim::ListConstruct pnnx_4118 6 1 21283 5499 5501 21284 21285 21286 5502 prim::Constant pnnx_4120 0 1 21287 value=0 prim::Constant pnnx_4121 0 1 21288 value=1 prim::Constant pnnx_4122 0 1 21289 value=3 prim::Constant pnnx_4123 0 1 21290 value=2 prim::Constant pnnx_4124 0 1 21291 value=4 prim::Constant pnnx_4125 0 1 21292 value=5 prim::ListConstruct pnnx_4126 6 1 21287 21288 21289 21290 21291 21292 5504 Tensor.view Tensor.view_1193 2 1 5495 5496 windows.41 $input=5495 $shape=5496 #5495=(36,64,192)f32 #windows.41=(36,8,8,192)f32 Tensor.view Tensor.view_1194 2 1 windows.41 5502 x3.41 $input=windows.41 $shape=5502 #windows.41=(36,8,8,192)f32 #x3.41=(1,6,6,8,8,192)f32 prim::Constant pnnx_4130 0 1 21294 value=1 prim::Constant pnnx_4131 0 1 21295 value=-1 prim::ListConstruct pnnx_4132 4 1 21294 271 511 21295 5507 torch.permute torch.permute_2607 2 1 x3.41 5504 5505 $input=x3.41 $dims=5504 #x3.41=(1,6,6,8,8,192)f32 #5505=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_61 1 1 5505 5506 memory_format=torch.contiguous_format $input=5505 #5505=(1,6,8,6,8,192)f32 #5506=(1,6,8,6,8,192)f32 prim::Constant pnnx_4134 0 1 21296 value=4 prim::Constant pnnx_4135 0 1 21297 value=4 prim::ListConstruct pnnx_4136 2 1 21296 21297 5509 prim::Constant pnnx_4137 0 1 21298 value=1 prim::Constant pnnx_4138 0 1 21299 value=2 prim::ListConstruct pnnx_4139 2 1 21298 21299 5510 Tensor.view Tensor.view_1195 2 1 5506 5507 shifted_x.21 $input=5506 $shape=5507 #5506=(1,6,8,6,8,192)f32 #shifted_x.21=(1,48,48,192)f32 aten::mul pnnx_4141 2 1 H.1 W.1 5512 aten::Int pnnx_4142 1 1 5512 5513 prim::ListConstruct pnnx_4143 3 1 5380 5513 5384 5514 prim::Constant pnnx_4145 0 1 5516 value=None prim::Constant pnnx_4146 0 1 21300 value=1 torch.roll torch.roll_2439 3 1 shifted_x.21 5509 5510 x4.41 $input=shifted_x.21 $shifts=5509 $dims=5510 #shifted_x.21=(1,48,48,192)f32 #x4.41=(1,48,48,192)f32 Tensor.view Tensor.view_1196 2 1 x4.41 5514 x5.21 $input=x4.41 $shape=5514 #x4.41=(1,48,48,192)f32 #x5.21=(1,2304,192)f32 aten::add pnnx_4147 3 1 5359 
x5.21 21300 input.95 #5359=(1,2304,192)f32 #x5.21=(1,2304,192)f32 #input.95=(1,2304,192)f32 nn.LayerNorm layers_dfe.3.residual_group.blocks.1.norm2 1 1 input.95 5518 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.95=(1,2304,192)f32 #5518=(1,2304,192)f32 nn.Linear layers_dfe.3.residual_group.blocks.1.mlp.fc1 1 1 5518 5523 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #5518=(1,2304,192)f32 #5523=(1,2304,384)f32 nn.GELU layers_dfe.3.residual_group.blocks.1.mlp.act 1 1 5523 5524 #5523=(1,2304,384)f32 #5524=(1,2304,384)f32 nn.Dropout layers_dfe.3.residual_group.blocks.1.mlp.drop 1 1 5524 5525 #5524=(1,2304,384)f32 #5525=(1,2304,384)f32 nn.Linear layers_dfe.3.residual_group.blocks.1.mlp.fc2 1 1 5525 5526 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #5525=(1,2304,384)f32 #5526=(1,2304,192)f32 nn.Dropout layers_dfe.3.residual_group.blocks.1.mlp.drop 1 1 5526 5527 #5526=(1,2304,192)f32 #5527=(1,2304,192)f32 prim::Constant pnnx_4148 0 1 5528 value=None prim::Constant pnnx_4149 0 1 21301 value=1 aten::add pnnx_4150 3 1 input.95 5527 21301 5529 #input.95=(1,2304,192)f32 #5527=(1,2304,192)f32 #5529=(1,2304,192)f32 prim::Constant pnnx_4151 0 1 5530 value=trunc prim::Constant pnnx_4152 0 1 5531 value=8 prim::Constant pnnx_4153 0 1 5532 value=0 prim::Constant pnnx_4154 0 1 5533 value=2 prim::Constant pnnx_4155 0 1 5534 value=1 prim::Constant pnnx_4156 0 1 5535 value=3 prim::Constant pnnx_4157 0 1 5536 value=8 prim::Constant pnnx_4158 0 1 5537 value=4 prim::Constant pnnx_4159 0 1 5538 value=5 prim::Constant pnnx_4160 0 1 5539 value=-1 prim::Constant pnnx_4161 0 1 5540 value=64 aten::size pnnx_4162 2 1 5529 5532 5546 #5529=(1,2304,192)f32 prim::NumToTensor pnnx_4163 1 1 5546 B.51 aten::Int pnnx_4164 1 1 B.51 5548 aten::Int pnnx_4165 1 1 B.51 5549 aten::size pnnx_4166 2 1 5529 5533 5550 #5529=(1,2304,192)f32 prim::NumToTensor pnnx_4167 1 1 5550 C.91 aten::Int pnnx_4168 1 1 C.91 5552 aten::Int pnnx_4169 1 1 C.91 5553 aten::Int pnnx_4170 1 1 C.91 5554 aten::Int pnnx_4171 1 1 C.91 5555 nn.LayerNorm layers_dfe.3.residual_group.blocks.2.norm1 1 1 5529 5556 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #5529=(1,2304,192)f32 #5556=(1,2304,192)f32 prim::ListConstruct pnnx_4172 4 1 5549 268 508 5555 5557 prim::Constant pnnx_4174 0 1 21302 value=0 Tensor.view Tensor.view_1197 2 1 5556 5557 x.43 $input=5556 $shape=5557 #5556=(1,2304,192)f32 #x.43=(1,48,48,192)f32 aten::size pnnx_4175 2 1 x.43 21302 5559 #x.43=(1,48,48,192)f32 prim::NumToTensor pnnx_4176 1 1 5559 B0.43 aten::Int pnnx_4177 1 1 B0.43 5561 aten::size pnnx_4178 2 1 x.43 5534 5562 #x.43=(1,48,48,192)f32 prim::NumToTensor pnnx_4179 1 1 5562 5563 prim::Constant pnnx_4180 0 1 21303 value=2 aten::size pnnx_4181 2 1 x.43 21303 5564 #x.43=(1,48,48,192)f32 prim::NumToTensor pnnx_4182 1 1 5564 5565 aten::size pnnx_4183 2 1 x.43 5535 5566 #x.43=(1,48,48,192)f32 prim::NumToTensor pnnx_4184 1 1 5566 C0.43 aten::Int pnnx_4185 1 1 C0.43 5568 aten::Int pnnx_4186 1 1 C0.43 5569 aten::div pnnx_4187 3 1 5563 5531 5530 5570 aten::Int pnnx_4188 1 1 5570 5571 prim::Constant pnnx_4189 0 1 21304 value=8 prim::Constant pnnx_4190 0 1 21305 value=trunc aten::div pnnx_4191 3 1 5565 21304 21305 5572 aten::Int pnnx_4192 1 1 5572 5573 prim::Constant pnnx_4193 0 1 21306 value=8 prim::ListConstruct pnnx_4194 6 1 5561 5571 5536 5573 21306 5569 5574 prim::Constant pnnx_4196 0 1 21307 value=0 prim::Constant pnnx_4197 0 1 
21308 value=1 prim::Constant pnnx_4198 0 1 21309 value=3 prim::Constant pnnx_4199 0 1 21310 value=2 prim::ListConstruct pnnx_4200 6 1 21307 21308 21309 21310 5537 5538 5576 Tensor.view Tensor.view_1198 2 1 x.43 5574 x0.43 $input=x.43 $shape=5574 #x.43=(1,48,48,192)f32 #x0.43=(1,6,8,6,8,192)f32 prim::Constant pnnx_4204 0 1 21312 value=8 prim::Constant pnnx_4205 0 1 21313 value=8 prim::ListConstruct pnnx_4206 4 1 5539 21312 21313 5568 5579 torch.permute torch.permute_2608 2 1 x0.43 5576 5577 $input=x0.43 $dims=5576 #x0.43=(1,6,8,6,8,192)f32 #5577=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_62 1 1 5577 5578 memory_format=torch.contiguous_format $input=5577 #5577=(1,6,6,8,8,192)f32 #5578=(1,6,6,8,8,192)f32 prim::Constant pnnx_4208 0 1 21314 value=-1 prim::ListConstruct pnnx_4209 3 1 21314 5540 5554 5581 prim::Constant pnnx_4211 0 1 5583 value=1.767767e-01 prim::Constant pnnx_4212 0 1 5584 value=trunc prim::Constant pnnx_4213 0 1 5585 value=6 prim::Constant pnnx_4214 0 1 5586 value=0 prim::Constant pnnx_4215 0 1 5587 value=1 prim::Constant pnnx_4216 0 1 5588 value=2 prim::Constant pnnx_4217 0 1 5589 value=3 prim::Constant pnnx_4218 0 1 5590 value=6 prim::Constant pnnx_4219 0 1 5591 value=4 prim::Constant pnnx_4220 0 1 5592 value=-2 prim::Constant pnnx_4221 0 1 5593 value=-1 prim::Constant pnnx_4222 0 1 5594 value=64 pnnx.Attribute layers_dfe.3.residual_group.blocks.2.attn 0 1 relative_position_bias_table.43 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.43=(225,6)f32 pnnx.Attribute layers_dfe.3.residual_group.blocks.2.attn 0 1 relative_position_index.43 @relative_position_index=(64,64)i64 #relative_position_index.43=(64,64)i64 Tensor.view Tensor.view_1199 2 1 5578 5579 x_windows.43 $input=5578 $shape=5579 #5578=(1,6,6,8,8,192)f32 #x_windows.43=(36,8,8,192)f32 Tensor.view Tensor.view_1200 2 1 x_windows.43 5581 x1.43 $input=x_windows.43 $shape=5581 #x_windows.43=(36,8,8,192)f32 #x1.43=(36,64,192)f32 aten::size pnnx_4223 2 1 x1.43 5586 5602 #x1.43=(36,64,192)f32 prim::NumToTensor pnnx_4224 1 1 5602 B_.43 aten::Int pnnx_4225 1 1 B_.43 5604 aten::Int pnnx_4226 1 1 B_.43 5605 aten::size pnnx_4227 2 1 x1.43 5587 5606 #x1.43=(36,64,192)f32 prim::NumToTensor pnnx_4228 1 1 5606 N.43 aten::Int pnnx_4229 1 1 N.43 5608 aten::Int pnnx_4230 1 1 N.43 5609 aten::size pnnx_4231 2 1 x1.43 5588 5610 #x1.43=(36,64,192)f32 prim::NumToTensor pnnx_4232 1 1 5610 C.93 aten::Int pnnx_4233 1 1 C.93 5612 nn.Linear layers_dfe.3.residual_group.blocks.2.attn.qkv 1 1 x1.43 5613 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.43=(36,64,192)f32 #5613=(36,64,576)f32 aten::div pnnx_4234 3 1 C.93 5585 5584 5614 aten::Int pnnx_4235 1 1 5614 5615 prim::ListConstruct pnnx_4236 5 1 5605 5609 5589 5590 5615 5616 prim::Constant pnnx_4238 0 1 21315 value=2 prim::Constant pnnx_4239 0 1 21316 value=0 prim::Constant pnnx_4240 0 1 21317 value=3 prim::Constant pnnx_4241 0 1 21318 value=1 prim::ListConstruct pnnx_4242 5 1 21315 21316 21317 21318 5591 5618 Tensor.reshape Tensor.reshape_474 2 1 5613 5616 5617 $input=5613 $shape=5616 #5613=(36,64,576)f32 #5617=(36,64,3,6,32)f32 prim::Constant pnnx_4244 0 1 21319 value=0 prim::Constant pnnx_4245 0 1 21320 value=0 prim::Constant pnnx_4247 0 1 21321 value=0 prim::Constant pnnx_4248 0 1 21322 value=1 prim::Constant pnnx_4250 0 1 21323 value=0 prim::Constant pnnx_4251 0 1 21324 value=2 torch.permute torch.permute_2609 2 1 5617 5618 qkv0.43 $input=5617 $dims=5618 #5617=(36,64,3,6,32)f32 #qkv0.43=(3,36,6,64,32)f32 Tensor.select 
Tensor.select_710 3 1 qkv0.43 21319 21320 q.43 $input=qkv0.43 $dim=21319 $index=21320 #qkv0.43=(3,36,6,64,32)f32 #q.43=(36,6,64,32)f32 aten::mul pnnx_4253 2 1 q.43 5583 q0.43 #q.43=(36,6,64,32)f32 #q0.43=(36,6,64,32)f32 Tensor.select Tensor.select_711 3 1 qkv0.43 21321 21322 k.43 $input=qkv0.43 $dim=21321 $index=21322 #qkv0.43=(3,36,6,64,32)f32 #k.43=(36,6,64,32)f32 prim::Constant pnnx_4256 0 1 21325 value=-1 prim::ListConstruct pnnx_4257 1 1 21325 5626 Tensor.view Tensor.view_1201 2 1 relative_position_index.43 5626 5627 $input=relative_position_index.43 $shape=5626 #relative_position_index.43=(64,64)i64 #5627=(4096)i64 prim::ListConstruct pnnx_4259 1 1 5627 5628 #5627=(4096)i64 prim::Constant pnnx_4261 0 1 21326 value=64 prim::Constant pnnx_4262 0 1 21327 value=-1 prim::ListConstruct pnnx_4263 3 1 5594 21326 21327 5630 Tensor.index Tensor.index_346 2 1 relative_position_bias_table.43 5628 5629 $input=relative_position_bias_table.43 $expr=5628 #relative_position_bias_table.43=(225,6)f32 #5629=(4096,6)f32 prim::Constant pnnx_4265 0 1 21328 value=2 prim::Constant pnnx_4266 0 1 21329 value=0 prim::Constant pnnx_4267 0 1 21330 value=1 prim::ListConstruct pnnx_4268 3 1 21328 21329 21330 5632 Tensor.view Tensor.view_1202 2 1 5629 5630 relative_position_bias.43 $input=5629 $shape=5630 #5629=(4096,6)f32 #relative_position_bias.43=(64,64,6)f32 prim::Constant pnnx_4272 0 1 21332 value=0 torch.permute torch.permute_2610 2 1 relative_position_bias.43 5632 5633 $input=relative_position_bias.43 $dims=5632 #relative_position_bias.43=(64,64,6)f32 #5633=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_63 1 1 5633 relative_position_bias0.43 memory_format=torch.contiguous_format $input=5633 #5633=(6,64,64)f32 #relative_position_bias0.43=(6,64,64)f32 prim::Constant pnnx_4274 0 1 21333 value=1 torch.transpose torch.transpose_3007 3 1 k.43 5592 5593 5624 $input=k.43 $dim0=5592 $dim1=5593 #k.43=(36,6,64,32)f32 #5624=(36,6,32,64)f32 torch.matmul torch.matmul_2244 2 1 q0.43 5624 attn.87 $input=q0.43 $other=5624 #q0.43=(36,6,64,32)f32 #5624=(36,6,32,64)f32 #attn.87=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3258 2 1 relative_position_bias0.43 21332 5635 $input=relative_position_bias0.43 $dim=21332 #relative_position_bias0.43=(6,64,64)f32 #5635=(1,6,64,64)f32 aten::add pnnx_4275 3 1 attn.87 5635 21333 input.97 #attn.87=(36,6,64,64)f32 #5635=(1,6,64,64)f32 #input.97=(36,6,64,64)f32 nn.Softmax layers_dfe.3.residual_group.blocks.2.attn.softmax 1 1 input.97 5637 dim=-1 #input.97=(36,6,64,64)f32 #5637=(36,6,64,64)f32 nn.Dropout layers_dfe.3.residual_group.blocks.2.attn.attn_drop 1 1 5637 5638 #5637=(36,6,64,64)f32 #5638=(36,6,64,64)f32 Tensor.select Tensor.select_712 3 1 qkv0.43 21323 21324 v.43 $input=qkv0.43 $dim=21323 $index=21324 #qkv0.43=(3,36,6,64,32)f32 #v.43=(36,6,64,32)f32 prim::Constant pnnx_4277 0 1 21334 value=1 prim::Constant pnnx_4278 0 1 21335 value=2 torch.matmul torch.matmul_2245 2 1 5638 v.43 5639 $input=5638 $other=v.43 #5638=(36,6,64,64)f32 #v.43=(36,6,64,32)f32 #5639=(36,6,64,32)f32 prim::ListConstruct pnnx_4280 3 1 5604 5608 5612 5641 torch.transpose torch.transpose_3008 3 1 5639 21334 21335 5640 $input=5639 $dim0=21334 $dim1=21335 #5639=(36,6,64,32)f32 #5640=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_475 2 1 5640 5641 input0.45 $input=5640 $shape=5641 #5640=(36,64,6,32)f32 #input0.45=(36,64,192)f32 nn.Linear layers_dfe.3.residual_group.blocks.2.attn.proj 1 1 input0.45 5643 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.45=(36,64,192)f32 
#5643=(36,64,192)f32 nn.Dropout layers_dfe.3.residual_group.blocks.2.attn.proj_drop 1 1 5643 5644 #5643=(36,64,192)f32 #5644=(36,64,192)f32 prim::Constant pnnx_4282 0 1 21336 value=-1 prim::Constant pnnx_4283 0 1 21337 value=8 prim::Constant pnnx_4284 0 1 21338 value=8 prim::ListConstruct pnnx_4285 4 1 21336 21337 21338 5553 5645 prim::Constant pnnx_4287 0 1 21339 value=8 prim::Constant pnnx_4288 0 1 21340 value=trunc aten::div pnnx_4289 3 1 H.1 21339 21340 5647 aten::Int pnnx_4290 1 1 5647 5648 prim::Constant pnnx_4291 0 1 21341 value=8 prim::Constant pnnx_4292 0 1 21342 value=trunc aten::div pnnx_4293 3 1 W.1 21341 21342 5649 aten::Int pnnx_4294 1 1 5649 5650 prim::Constant pnnx_4295 0 1 21343 value=1 prim::Constant pnnx_4296 0 1 21344 value=8 prim::Constant pnnx_4297 0 1 21345 value=8 prim::Constant pnnx_4298 0 1 21346 value=-1 prim::ListConstruct pnnx_4299 6 1 21343 5648 5650 21344 21345 21346 5651 prim::Constant pnnx_4301 0 1 21347 value=0 prim::Constant pnnx_4302 0 1 21348 value=1 prim::Constant pnnx_4303 0 1 21349 value=3 prim::Constant pnnx_4304 0 1 21350 value=2 prim::Constant pnnx_4305 0 1 21351 value=4 prim::Constant pnnx_4306 0 1 21352 value=5 prim::ListConstruct pnnx_4307 6 1 21347 21348 21349 21350 21351 21352 5653 Tensor.view Tensor.view_1203 2 1 5644 5645 windows.43 $input=5644 $shape=5645 #5644=(36,64,192)f32 #windows.43=(36,8,8,192)f32 Tensor.view Tensor.view_1204 2 1 windows.43 5651 x2.43 $input=windows.43 $shape=5651 #windows.43=(36,8,8,192)f32 #x2.43=(1,6,6,8,8,192)f32 prim::Constant pnnx_4311 0 1 21354 value=1 prim::Constant pnnx_4312 0 1 21355 value=-1 prim::ListConstruct pnnx_4313 4 1 21354 265 505 21355 5656 torch.permute torch.permute_2611 2 1 x2.43 5653 5654 $input=x2.43 $dims=5653 #x2.43=(1,6,6,8,8,192)f32 #5654=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_64 1 1 5654 5655 memory_format=torch.contiguous_format $input=5654 #5654=(1,6,8,6,8,192)f32 #5655=(1,6,8,6,8,192)f32 aten::mul pnnx_4315 2 1 H.1 W.1 5658 aten::Int pnnx_4316 1 1 5658 5659 prim::ListConstruct pnnx_4317 3 1 5548 5659 5552 5660 prim::Constant pnnx_4319 0 1 5662 value=None prim::Constant pnnx_4320 0 1 21356 value=1 Tensor.view Tensor.view_1205 2 1 5655 5656 x3.43 $input=5655 $shape=5656 #5655=(1,6,8,6,8,192)f32 #x3.43=(1,48,48,192)f32 Tensor.view Tensor.view_1206 2 1 x3.43 5660 x4.43 $input=x3.43 $shape=5660 #x3.43=(1,48,48,192)f32 #x4.43=(1,2304,192)f32 aten::add pnnx_4321 3 1 5529 x4.43 21356 input.99 #5529=(1,2304,192)f32 #x4.43=(1,2304,192)f32 #input.99=(1,2304,192)f32 nn.LayerNorm layers_dfe.3.residual_group.blocks.2.norm2 1 1 input.99 5664 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.99=(1,2304,192)f32 #5664=(1,2304,192)f32 nn.Linear layers_dfe.3.residual_group.blocks.2.mlp.fc1 1 1 5664 5669 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #5664=(1,2304,192)f32 #5669=(1,2304,384)f32 nn.GELU layers_dfe.3.residual_group.blocks.2.mlp.act 1 1 5669 5670 #5669=(1,2304,384)f32 #5670=(1,2304,384)f32 nn.Dropout layers_dfe.3.residual_group.blocks.2.mlp.drop 1 1 5670 5671 #5670=(1,2304,384)f32 #5671=(1,2304,384)f32 nn.Linear layers_dfe.3.residual_group.blocks.2.mlp.fc2 1 1 5671 5672 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #5671=(1,2304,384)f32 #5672=(1,2304,192)f32 nn.Dropout layers_dfe.3.residual_group.blocks.2.mlp.drop 1 1 5672 5673 #5672=(1,2304,192)f32 #5673=(1,2304,192)f32 prim::Constant pnnx_4322 0 1 5674 value=None prim::Constant pnnx_4323 0 1 21357 value=1 
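The qkv/reshape/permute/select/matmul chain traced above for layers_dfe.3.residual_group.blocks.2.attn follows the usual Swin-style window attention over 36 windows of 8x8 = 64 tokens, with 6 heads of dim 32 (the constant 1.767767e-01 is 32**-0.5) and a relative position bias gathered from the (225,6) table via the (64,64) index. A minimal PyTorch sketch of that pattern, shapes taken from the trace, dropout layers omitted; class and variable names are illustrative and not the exported model's own code:

import torch
import torch.nn as nn

class WindowAttentionSketch(nn.Module):
    # Editorial sketch only; it mirrors the traced operators, not the original source.
    def __init__(self, dim=192, window=8, heads=6):
        super().__init__()
        self.heads, self.head_dim = heads, dim // heads           # 6 heads x 32
        self.scale = self.head_dim ** -0.5                        # 0.1767767, the traced constant
        self.qkv = nn.Linear(dim, dim * 3)                        # 192 -> 576, as in ...attn.qkv
        self.proj = nn.Linear(dim, dim)                           # 192 -> 192, as in ...attn.proj
        self.bias_table = nn.Parameter(torch.zeros((2 * window - 1) ** 2, heads))  # (225, 6)
        self.register_buffer("bias_index",
                             torch.zeros(window * window, window * window, dtype=torch.long))  # (64, 64)

    def forward(self, x):                                         # x: (num_windows, 64, 192)
        B_, N, C = x.shape
        qkv = self.qkv(x).reshape(B_, N, 3, self.heads, self.head_dim).permute(2, 0, 3, 1, 4)
        q, k, v = qkv[0], qkv[1], qkv[2]                          # the three Tensor.select ops
        attn = (q * self.scale) @ k.transpose(-2, -1)             # (B_, 6, 64, 64)
        bias = self.bias_table[self.bias_index.view(-1)]          # (4096, 6), the Tensor.index op
        bias = bias.view(N, N, -1).permute(2, 0, 1).contiguous()  # (6, 64, 64)
        attn = (attn + bias.unsqueeze(0)).softmax(dim=-1)
        out = (attn @ v).transpose(1, 2).reshape(B_, N, C)        # back to (36, 64, 192)
        return self.proj(out)

# e.g. WindowAttentionSketch()(torch.randn(36, 64, 192)) gives (36, 64, 192), matching blob 5644 above.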
aten::add pnnx_4324 3 1 input.99 5673 21357 5675 #input.99=(1,2304,192)f32 #5673=(1,2304,192)f32 #5675=(1,2304,192)f32 prim::Constant pnnx_4325 0 1 5676 value=trunc prim::Constant pnnx_4326 0 1 5677 value=8 prim::Constant pnnx_4327 0 1 5678 value=0 prim::Constant pnnx_4328 0 1 5679 value=2 prim::Constant pnnx_4329 0 1 5680 value=-4 prim::Constant pnnx_4330 0 1 5681 value=1 prim::Constant pnnx_4331 0 1 5682 value=3 prim::Constant pnnx_4332 0 1 5683 value=8 prim::Constant pnnx_4333 0 1 5684 value=4 prim::Constant pnnx_4334 0 1 5685 value=5 prim::Constant pnnx_4335 0 1 5686 value=-1 prim::Constant pnnx_4336 0 1 5687 value=64 pnnx.Attribute layers_dfe.3.residual_group.blocks.3 0 1 attn_mask.23 @attn_mask=(36,64,64)f32 #attn_mask.23=(36,64,64)f32 aten::size pnnx_4337 2 1 5675 5678 5694 #5675=(1,2304,192)f32 prim::NumToTensor pnnx_4338 1 1 5694 B.53 aten::Int pnnx_4339 1 1 B.53 5696 aten::Int pnnx_4340 1 1 B.53 5697 aten::size pnnx_4341 2 1 5675 5679 5698 #5675=(1,2304,192)f32 prim::NumToTensor pnnx_4342 1 1 5698 C.95 aten::Int pnnx_4343 1 1 C.95 5700 aten::Int pnnx_4344 1 1 C.95 5701 aten::Int pnnx_4345 1 1 C.95 5702 aten::Int pnnx_4346 1 1 C.95 5703 nn.LayerNorm layers_dfe.3.residual_group.blocks.3.norm1 1 1 5675 5704 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #5675=(1,2304,192)f32 #5704=(1,2304,192)f32 prim::ListConstruct pnnx_4347 4 1 5697 262 502 5703 5705 prim::Constant pnnx_4349 0 1 21358 value=-4 prim::ListConstruct pnnx_4350 2 1 5680 21358 5707 prim::Constant pnnx_4351 0 1 21359 value=2 prim::ListConstruct pnnx_4352 2 1 5681 21359 5708 Tensor.view Tensor.view_1207 2 1 5704 5705 x.45 $input=5704 $shape=5705 #5704=(1,2304,192)f32 #x.45=(1,48,48,192)f32 prim::Constant pnnx_4354 0 1 21360 value=0 torch.roll torch.roll_2440 3 1 x.45 5707 5708 x0.45 $input=x.45 $shifts=5707 $dims=5708 #x.45=(1,48,48,192)f32 #x0.45=(1,48,48,192)f32 aten::size pnnx_4355 2 1 x0.45 21360 5710 #x0.45=(1,48,48,192)f32 prim::NumToTensor pnnx_4356 1 1 5710 B0.45 aten::Int pnnx_4357 1 1 B0.45 5712 prim::Constant pnnx_4358 0 1 21361 value=1 aten::size pnnx_4359 2 1 x0.45 21361 5713 #x0.45=(1,48,48,192)f32 prim::NumToTensor pnnx_4360 1 1 5713 5714 prim::Constant pnnx_4361 0 1 21362 value=2 aten::size pnnx_4362 2 1 x0.45 21362 5715 #x0.45=(1,48,48,192)f32 prim::NumToTensor pnnx_4363 1 1 5715 5716 aten::size pnnx_4364 2 1 x0.45 5682 5717 #x0.45=(1,48,48,192)f32 prim::NumToTensor pnnx_4365 1 1 5717 C0.45 aten::Int pnnx_4366 1 1 C0.45 5719 aten::Int pnnx_4367 1 1 C0.45 5720 aten::div pnnx_4368 3 1 5714 5677 5676 5721 aten::Int pnnx_4369 1 1 5721 5722 prim::Constant pnnx_4370 0 1 21363 value=8 prim::Constant pnnx_4371 0 1 21364 value=trunc aten::div pnnx_4372 3 1 5716 21363 21364 5723 aten::Int pnnx_4373 1 1 5723 5724 prim::Constant pnnx_4374 0 1 21365 value=8 prim::ListConstruct pnnx_4375 6 1 5712 5722 5683 5724 21365 5720 5725 prim::Constant pnnx_4377 0 1 21366 value=0 prim::Constant pnnx_4378 0 1 21367 value=1 prim::Constant pnnx_4379 0 1 21368 value=3 prim::Constant pnnx_4380 0 1 21369 value=2 prim::ListConstruct pnnx_4381 6 1 21366 21367 21368 21369 5684 5685 5727 Tensor.view Tensor.view_1208 2 1 x0.45 5725 x1.45 $input=x0.45 $shape=5725 #x0.45=(1,48,48,192)f32 #x1.45=(1,6,8,6,8,192)f32 prim::Constant pnnx_4385 0 1 21371 value=8 prim::Constant pnnx_4386 0 1 21372 value=8 prim::ListConstruct pnnx_4387 4 1 5686 21371 21372 5719 5730 torch.permute torch.permute_2612 2 1 x1.45 5727 5728 $input=x1.45 $dims=5727 #x1.45=(1,6,8,6,8,192)f32 #5728=(1,6,6,8,8,192)f32 
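Block 3 here, like every odd-indexed block in these residual groups, is the shifted-window variant: the 48x48 map is rolled by (-4,-4) over dims (1,2) before window partition (torch.roll_2440 above, built from the value=-4 constants), the precomputed attn_mask.23 of shape (36,64,64) is broadcast-added to the attention logits further down (the view to (1,36,6,64,64) plus the two unsqueezes), and the shift is undone afterwards by torch.roll_2441 with shifts (4,4). A compact sketch of that control flow under the standard Swin shifted-window scheme; the helper and argument names are illustrative:

import torch

def window_partition(x, ws=8):
    # (B, H, W, C) -> (B*(H/ws)*(W/ws), ws*ws, C), the view/permute/contiguous/view chain in the trace
    B, H, W, C = x.shape
    x = x.view(B, H // ws, ws, W // ws, ws, C).permute(0, 1, 3, 2, 4, 5).contiguous()
    return x.view(-1, ws * ws, C)

def window_reverse(win, ws, H, W):
    # inverse of window_partition, the Tensor.view/permute chain that rebuilds the (1,48,48,192) map
    B = win.shape[0] // ((H // ws) * (W // ws))
    x = win.view(B, H // ws, W // ws, ws, ws, -1).permute(0, 1, 3, 2, 4, 5).contiguous()
    return x.view(B, H, W, -1)

def shifted_window_block(x, win_attn, attn_mask, ws=8, shift=4):
    # x: (B, 48, 48, 192); win_attn(windows, mask) is the windowed attention sketched earlier;
    # attn_mask: (num_windows, ws*ws, ws*ws), added to the logits inside win_attn.
    B, H, W, C = x.shape
    shifted = torch.roll(x, shifts=(-shift, -shift), dims=(1, 2))   # torch.roll_2440
    out = win_attn(window_partition(shifted, ws), attn_mask)        # (36, 64, 192)
    shifted = window_reverse(out, ws, H, W)
    return torch.roll(shifted, shifts=(shift, shift), dims=(1, 2))  # torch.roll_2441 undoes the shift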
Tensor.contiguous Tensor.contiguous_65 1 1 5728 5729 memory_format=torch.contiguous_format $input=5728 #5728=(1,6,6,8,8,192)f32 #5729=(1,6,6,8,8,192)f32 prim::Constant pnnx_4389 0 1 21373 value=-1 prim::ListConstruct pnnx_4390 3 1 21373 5687 5702 5732 prim::Constant pnnx_4392 0 1 5734 value=1.767767e-01 prim::Constant pnnx_4393 0 1 5735 value=trunc prim::Constant pnnx_4394 0 1 5736 value=6 prim::Constant pnnx_4395 0 1 5737 value=0 prim::Constant pnnx_4396 0 1 5738 value=1 prim::Constant pnnx_4397 0 1 5739 value=2 prim::Constant pnnx_4398 0 1 5740 value=3 prim::Constant pnnx_4399 0 1 5741 value=6 prim::Constant pnnx_4400 0 1 5742 value=4 prim::Constant pnnx_4401 0 1 5743 value=-2 prim::Constant pnnx_4402 0 1 5744 value=-1 prim::Constant pnnx_4403 0 1 5745 value=64 pnnx.Attribute layers_dfe.3.residual_group.blocks.3.attn 0 1 relative_position_bias_table.45 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.45=(225,6)f32 pnnx.Attribute layers_dfe.3.residual_group.blocks.3.attn 0 1 relative_position_index.45 @relative_position_index=(64,64)i64 #relative_position_index.45=(64,64)i64 Tensor.view Tensor.view_1209 2 1 5729 5730 x_windows.45 $input=5729 $shape=5730 #5729=(1,6,6,8,8,192)f32 #x_windows.45=(36,8,8,192)f32 Tensor.view Tensor.view_1210 2 1 x_windows.45 5732 x2.45 $input=x_windows.45 $shape=5732 #x_windows.45=(36,8,8,192)f32 #x2.45=(36,64,192)f32 aten::size pnnx_4404 2 1 x2.45 5737 5753 #x2.45=(36,64,192)f32 prim::NumToTensor pnnx_4405 1 1 5753 B_.45 aten::Int pnnx_4406 1 1 B_.45 5755 aten::Int pnnx_4407 1 1 B_.45 5756 aten::size pnnx_4408 2 1 x2.45 5738 5757 #x2.45=(36,64,192)f32 prim::NumToTensor pnnx_4409 1 1 5757 N.45 aten::Int pnnx_4410 1 1 N.45 5759 aten::Int pnnx_4411 1 1 N.45 5760 aten::Int pnnx_4412 1 1 N.45 5761 aten::Int pnnx_4413 1 1 N.45 5762 aten::Int pnnx_4414 1 1 N.45 5763 aten::Int pnnx_4415 1 1 N.45 5764 aten::size pnnx_4416 2 1 x2.45 5739 5765 #x2.45=(36,64,192)f32 prim::NumToTensor pnnx_4417 1 1 5765 C.97 aten::Int pnnx_4418 1 1 C.97 5767 nn.Linear layers_dfe.3.residual_group.blocks.3.attn.qkv 1 1 x2.45 5768 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.45=(36,64,192)f32 #5768=(36,64,576)f32 aten::div pnnx_4419 3 1 C.97 5736 5735 5769 aten::Int pnnx_4420 1 1 5769 5770 prim::ListConstruct pnnx_4421 5 1 5756 5764 5740 5741 5770 5771 prim::Constant pnnx_4423 0 1 21374 value=2 prim::Constant pnnx_4424 0 1 21375 value=0 prim::Constant pnnx_4425 0 1 21376 value=3 prim::Constant pnnx_4426 0 1 21377 value=1 prim::ListConstruct pnnx_4427 5 1 21374 21375 21376 21377 5742 5773 Tensor.reshape Tensor.reshape_476 2 1 5768 5771 5772 $input=5768 $shape=5771 #5768=(36,64,576)f32 #5772=(36,64,3,6,32)f32 prim::Constant pnnx_4429 0 1 21378 value=0 prim::Constant pnnx_4430 0 1 21379 value=0 prim::Constant pnnx_4432 0 1 21380 value=0 prim::Constant pnnx_4433 0 1 21381 value=1 prim::Constant pnnx_4435 0 1 21382 value=0 prim::Constant pnnx_4436 0 1 21383 value=2 torch.permute torch.permute_2613 2 1 5772 5773 qkv0.45 $input=5772 $dims=5773 #5772=(36,64,3,6,32)f32 #qkv0.45=(3,36,6,64,32)f32 Tensor.select Tensor.select_713 3 1 qkv0.45 21378 21379 q.45 $input=qkv0.45 $dim=21378 $index=21379 #qkv0.45=(3,36,6,64,32)f32 #q.45=(36,6,64,32)f32 aten::mul pnnx_4438 2 1 q.45 5734 q0.45 #q.45=(36,6,64,32)f32 #q0.45=(36,6,64,32)f32 Tensor.select Tensor.select_714 3 1 qkv0.45 21380 21381 k.45 $input=qkv0.45 $dim=21380 $index=21381 #qkv0.45=(3,36,6,64,32)f32 #k.45=(36,6,64,32)f32 prim::Constant pnnx_4441 0 1 21384 value=-1 prim::ListConstruct 
pnnx_4442 1 1 21384 5781 Tensor.view Tensor.view_1211 2 1 relative_position_index.45 5781 5782 $input=relative_position_index.45 $shape=5781 #relative_position_index.45=(64,64)i64 #5782=(4096)i64 prim::ListConstruct pnnx_4444 1 1 5782 5783 #5782=(4096)i64 prim::Constant pnnx_4446 0 1 21385 value=64 prim::Constant pnnx_4447 0 1 21386 value=-1 prim::ListConstruct pnnx_4448 3 1 5745 21385 21386 5785 Tensor.index Tensor.index_347 2 1 relative_position_bias_table.45 5783 5784 $input=relative_position_bias_table.45 $expr=5783 #relative_position_bias_table.45=(225,6)f32 #5784=(4096,6)f32 prim::Constant pnnx_4450 0 1 21387 value=2 prim::Constant pnnx_4451 0 1 21388 value=0 prim::Constant pnnx_4452 0 1 21389 value=1 prim::ListConstruct pnnx_4453 3 1 21387 21388 21389 5787 Tensor.view Tensor.view_1212 2 1 5784 5785 relative_position_bias.45 $input=5784 $shape=5785 #5784=(4096,6)f32 #relative_position_bias.45=(64,64,6)f32 prim::Constant pnnx_4457 0 1 21391 value=0 torch.permute torch.permute_2614 2 1 relative_position_bias.45 5787 5788 $input=relative_position_bias.45 $dims=5787 #relative_position_bias.45=(64,64,6)f32 #5788=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_66 1 1 5788 relative_position_bias0.45 memory_format=torch.contiguous_format $input=5788 #5788=(6,64,64)f32 #relative_position_bias0.45=(6,64,64)f32 prim::Constant pnnx_4459 0 1 21392 value=1 torch.transpose torch.transpose_3009 3 1 k.45 5743 5744 5779 $input=k.45 $dim0=5743 $dim1=5744 #k.45=(36,6,64,32)f32 #5779=(36,6,32,64)f32 torch.matmul torch.matmul_2246 2 1 q0.45 5779 attn.91 $input=q0.45 $other=5779 #q0.45=(36,6,64,32)f32 #5779=(36,6,32,64)f32 #attn.91=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3259 2 1 relative_position_bias0.45 21391 5790 $input=relative_position_bias0.45 $dim=21391 #relative_position_bias0.45=(6,64,64)f32 #5790=(1,6,64,64)f32 aten::add pnnx_4460 3 1 attn.91 5790 21392 attn0.23 #attn.91=(36,6,64,64)f32 #5790=(1,6,64,64)f32 #attn0.23=(36,6,64,64)f32 prim::Constant pnnx_4461 0 1 21393 value=0 aten::size pnnx_4462 2 1 attn_mask.23 21393 5792 #attn_mask.23=(36,64,64)f32 prim::NumToTensor pnnx_4463 1 1 5792 other.23 aten::Int pnnx_4464 1 1 other.23 5794 prim::Constant pnnx_4465 0 1 21394 value=trunc aten::div pnnx_4466 3 1 B_.45 other.23 21394 5795 aten::Int pnnx_4467 1 1 5795 5796 prim::Constant pnnx_4468 0 1 21395 value=6 prim::ListConstruct pnnx_4469 5 1 5796 5794 21395 5763 5762 5797 prim::Constant pnnx_4471 0 1 21396 value=1 prim::Constant pnnx_4473 0 1 21397 value=0 prim::Constant pnnx_4475 0 1 21398 value=1 Tensor.view Tensor.view_1213 2 1 attn0.23 5797 5798 $input=attn0.23 $shape=5797 #attn0.23=(36,6,64,64)f32 #5798=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3260 2 1 attn_mask.23 21396 5799 $input=attn_mask.23 $dim=21396 #attn_mask.23=(36,64,64)f32 #5799=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3261 2 1 5799 21397 5800 $input=5799 $dim=21397 #5799=(36,1,64,64)f32 #5800=(1,36,1,64,64)f32 aten::add pnnx_4476 3 1 5798 5800 21398 attn1.23 #5798=(1,36,6,64,64)f32 #5800=(1,36,1,64,64)f32 #attn1.23=(1,36,6,64,64)f32 prim::Constant pnnx_4477 0 1 21399 value=-1 prim::Constant pnnx_4478 0 1 21400 value=6 prim::ListConstruct pnnx_4479 4 1 21399 21400 5761 5760 5802 Tensor.view Tensor.view_1214 2 1 attn1.23 5802 input.101 $input=attn1.23 $shape=5802 #attn1.23=(1,36,6,64,64)f32 #input.101=(36,6,64,64)f32 nn.Softmax layers_dfe.3.residual_group.blocks.3.attn.softmax 1 1 input.101 5804 dim=-1 #input.101=(36,6,64,64)f32 #5804=(36,6,64,64)f32 nn.Dropout 
layers_dfe.3.residual_group.blocks.3.attn.attn_drop 1 1 5804 5805 #5804=(36,6,64,64)f32 #5805=(36,6,64,64)f32 Tensor.select Tensor.select_715 3 1 qkv0.45 21382 21383 v.45 $input=qkv0.45 $dim=21382 $index=21383 #qkv0.45=(3,36,6,64,32)f32 #v.45=(36,6,64,32)f32 prim::Constant pnnx_4482 0 1 21401 value=1 prim::Constant pnnx_4483 0 1 21402 value=2 torch.matmul torch.matmul_2247 2 1 5805 v.45 5806 $input=5805 $other=v.45 #5805=(36,6,64,64)f32 #v.45=(36,6,64,32)f32 #5806=(36,6,64,32)f32 prim::ListConstruct pnnx_4485 3 1 5755 5759 5767 5808 torch.transpose torch.transpose_3010 3 1 5806 21401 21402 5807 $input=5806 $dim0=21401 $dim1=21402 #5806=(36,6,64,32)f32 #5807=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_477 2 1 5807 5808 input0.47 $input=5807 $shape=5808 #5807=(36,64,6,32)f32 #input0.47=(36,64,192)f32 nn.Linear layers_dfe.3.residual_group.blocks.3.attn.proj 1 1 input0.47 5810 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.47=(36,64,192)f32 #5810=(36,64,192)f32 nn.Dropout layers_dfe.3.residual_group.blocks.3.attn.proj_drop 1 1 5810 5811 #5810=(36,64,192)f32 #5811=(36,64,192)f32 prim::Constant pnnx_4487 0 1 21403 value=-1 prim::Constant pnnx_4488 0 1 21404 value=8 prim::Constant pnnx_4489 0 1 21405 value=8 prim::ListConstruct pnnx_4490 4 1 21403 21404 21405 5701 5812 prim::Constant pnnx_4492 0 1 21406 value=8 prim::Constant pnnx_4493 0 1 21407 value=trunc aten::div pnnx_4494 3 1 H.1 21406 21407 5814 aten::Int pnnx_4495 1 1 5814 5815 prim::Constant pnnx_4496 0 1 21408 value=8 prim::Constant pnnx_4497 0 1 21409 value=trunc aten::div pnnx_4498 3 1 W.1 21408 21409 5816 aten::Int pnnx_4499 1 1 5816 5817 prim::Constant pnnx_4500 0 1 21410 value=1 prim::Constant pnnx_4501 0 1 21411 value=8 prim::Constant pnnx_4502 0 1 21412 value=8 prim::Constant pnnx_4503 0 1 21413 value=-1 prim::ListConstruct pnnx_4504 6 1 21410 5815 5817 21411 21412 21413 5818 prim::Constant pnnx_4506 0 1 21414 value=0 prim::Constant pnnx_4507 0 1 21415 value=1 prim::Constant pnnx_4508 0 1 21416 value=3 prim::Constant pnnx_4509 0 1 21417 value=2 prim::Constant pnnx_4510 0 1 21418 value=4 prim::Constant pnnx_4511 0 1 21419 value=5 prim::ListConstruct pnnx_4512 6 1 21414 21415 21416 21417 21418 21419 5820 Tensor.view Tensor.view_1215 2 1 5811 5812 windows.45 $input=5811 $shape=5812 #5811=(36,64,192)f32 #windows.45=(36,8,8,192)f32 Tensor.view Tensor.view_1216 2 1 windows.45 5818 x3.45 $input=windows.45 $shape=5818 #windows.45=(36,8,8,192)f32 #x3.45=(1,6,6,8,8,192)f32 prim::Constant pnnx_4516 0 1 21421 value=1 prim::Constant pnnx_4517 0 1 21422 value=-1 prim::ListConstruct pnnx_4518 4 1 21421 259 499 21422 5823 torch.permute torch.permute_2615 2 1 x3.45 5820 5821 $input=x3.45 $dims=5820 #x3.45=(1,6,6,8,8,192)f32 #5821=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_67 1 1 5821 5822 memory_format=torch.contiguous_format $input=5821 #5821=(1,6,8,6,8,192)f32 #5822=(1,6,8,6,8,192)f32 prim::Constant pnnx_4520 0 1 21423 value=4 prim::Constant pnnx_4521 0 1 21424 value=4 prim::ListConstruct pnnx_4522 2 1 21423 21424 5825 prim::Constant pnnx_4523 0 1 21425 value=1 prim::Constant pnnx_4524 0 1 21426 value=2 prim::ListConstruct pnnx_4525 2 1 21425 21426 5826 Tensor.view Tensor.view_1217 2 1 5822 5823 shifted_x.23 $input=5822 $shape=5823 #5822=(1,6,8,6,8,192)f32 #shifted_x.23=(1,48,48,192)f32 aten::mul pnnx_4527 2 1 H.1 W.1 5828 aten::Int pnnx_4528 1 1 5828 5829 prim::ListConstruct pnnx_4529 3 1 5696 5829 5700 5830 prim::Constant pnnx_4531 0 1 5832 value=None prim::Constant pnnx_4532 0 1 21427 
value=1 torch.roll torch.roll_2441 3 1 shifted_x.23 5825 5826 x4.45 $input=shifted_x.23 $shifts=5825 $dims=5826 #shifted_x.23=(1,48,48,192)f32 #x4.45=(1,48,48,192)f32 Tensor.view Tensor.view_1218 2 1 x4.45 5830 x5.23 $input=x4.45 $shape=5830 #x4.45=(1,48,48,192)f32 #x5.23=(1,2304,192)f32 aten::add pnnx_4533 3 1 5675 x5.23 21427 input.103 #5675=(1,2304,192)f32 #x5.23=(1,2304,192)f32 #input.103=(1,2304,192)f32 nn.LayerNorm layers_dfe.3.residual_group.blocks.3.norm2 1 1 input.103 5834 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.103=(1,2304,192)f32 #5834=(1,2304,192)f32 nn.Linear layers_dfe.3.residual_group.blocks.3.mlp.fc1 1 1 5834 5839 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #5834=(1,2304,192)f32 #5839=(1,2304,384)f32 nn.GELU layers_dfe.3.residual_group.blocks.3.mlp.act 1 1 5839 5840 #5839=(1,2304,384)f32 #5840=(1,2304,384)f32 nn.Dropout layers_dfe.3.residual_group.blocks.3.mlp.drop 1 1 5840 5841 #5840=(1,2304,384)f32 #5841=(1,2304,384)f32 nn.Linear layers_dfe.3.residual_group.blocks.3.mlp.fc2 1 1 5841 5842 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #5841=(1,2304,384)f32 #5842=(1,2304,192)f32 nn.Dropout layers_dfe.3.residual_group.blocks.3.mlp.drop 1 1 5842 5843 #5842=(1,2304,192)f32 #5843=(1,2304,192)f32 prim::Constant pnnx_4534 0 1 5844 value=None prim::Constant pnnx_4535 0 1 21428 value=1 aten::add pnnx_4536 3 1 input.103 5843 21428 5845 #input.103=(1,2304,192)f32 #5843=(1,2304,192)f32 #5845=(1,2304,192)f32 prim::Constant pnnx_4537 0 1 5846 value=trunc prim::Constant pnnx_4538 0 1 5847 value=8 prim::Constant pnnx_4539 0 1 5848 value=0 prim::Constant pnnx_4540 0 1 5849 value=2 prim::Constant pnnx_4541 0 1 5850 value=1 prim::Constant pnnx_4542 0 1 5851 value=3 prim::Constant pnnx_4543 0 1 5852 value=8 prim::Constant pnnx_4544 0 1 5853 value=4 prim::Constant pnnx_4545 0 1 5854 value=5 prim::Constant pnnx_4546 0 1 5855 value=-1 prim::Constant pnnx_4547 0 1 5856 value=64 aten::size pnnx_4548 2 1 5845 5848 5862 #5845=(1,2304,192)f32 prim::NumToTensor pnnx_4549 1 1 5862 B.55 aten::Int pnnx_4550 1 1 B.55 5864 aten::Int pnnx_4551 1 1 B.55 5865 aten::size pnnx_4552 2 1 5845 5849 5866 #5845=(1,2304,192)f32 prim::NumToTensor pnnx_4553 1 1 5866 C.99 aten::Int pnnx_4554 1 1 C.99 5868 aten::Int pnnx_4555 1 1 C.99 5869 aten::Int pnnx_4556 1 1 C.99 5870 aten::Int pnnx_4557 1 1 C.99 5871 nn.LayerNorm layers_dfe.3.residual_group.blocks.4.norm1 1 1 5845 5872 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #5845=(1,2304,192)f32 #5872=(1,2304,192)f32 prim::ListConstruct pnnx_4558 4 1 5865 256 496 5871 5873 prim::Constant pnnx_4560 0 1 21429 value=0 Tensor.view Tensor.view_1219 2 1 5872 5873 x.47 $input=5872 $shape=5873 #5872=(1,2304,192)f32 #x.47=(1,48,48,192)f32 aten::size pnnx_4561 2 1 x.47 21429 5875 #x.47=(1,48,48,192)f32 prim::NumToTensor pnnx_4562 1 1 5875 B0.47 aten::Int pnnx_4563 1 1 B0.47 5877 aten::size pnnx_4564 2 1 x.47 5850 5878 #x.47=(1,48,48,192)f32 prim::NumToTensor pnnx_4565 1 1 5878 5879 prim::Constant pnnx_4566 0 1 21430 value=2 aten::size pnnx_4567 2 1 x.47 21430 5880 #x.47=(1,48,48,192)f32 prim::NumToTensor pnnx_4568 1 1 5880 5881 aten::size pnnx_4569 2 1 x.47 5851 5882 #x.47=(1,48,48,192)f32 prim::NumToTensor pnnx_4570 1 1 5882 C0.47 aten::Int pnnx_4571 1 1 C0.47 5884 aten::Int pnnx_4572 1 1 C0.47 5885 aten::div pnnx_4573 3 1 5879 5847 5846 5886 aten::Int pnnx_4574 1 1 5886 5887 prim::Constant pnnx_4575 0 1 
21431 value=8 prim::Constant pnnx_4576 0 1 21432 value=trunc aten::div pnnx_4577 3 1 5881 21431 21432 5888 aten::Int pnnx_4578 1 1 5888 5889 prim::Constant pnnx_4579 0 1 21433 value=8 prim::ListConstruct pnnx_4580 6 1 5877 5887 5852 5889 21433 5885 5890 prim::Constant pnnx_4582 0 1 21434 value=0 prim::Constant pnnx_4583 0 1 21435 value=1 prim::Constant pnnx_4584 0 1 21436 value=3 prim::Constant pnnx_4585 0 1 21437 value=2 prim::ListConstruct pnnx_4586 6 1 21434 21435 21436 21437 5853 5854 5892 Tensor.view Tensor.view_1220 2 1 x.47 5890 x0.47 $input=x.47 $shape=5890 #x.47=(1,48,48,192)f32 #x0.47=(1,6,8,6,8,192)f32 prim::Constant pnnx_4590 0 1 21439 value=8 prim::Constant pnnx_4591 0 1 21440 value=8 prim::ListConstruct pnnx_4592 4 1 5855 21439 21440 5884 5895 torch.permute torch.permute_2616 2 1 x0.47 5892 5893 $input=x0.47 $dims=5892 #x0.47=(1,6,8,6,8,192)f32 #5893=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_68 1 1 5893 5894 memory_format=torch.contiguous_format $input=5893 #5893=(1,6,6,8,8,192)f32 #5894=(1,6,6,8,8,192)f32 prim::Constant pnnx_4594 0 1 21441 value=-1 prim::ListConstruct pnnx_4595 3 1 21441 5856 5870 5897 prim::Constant pnnx_4597 0 1 5899 value=1.767767e-01 prim::Constant pnnx_4598 0 1 5900 value=trunc prim::Constant pnnx_4599 0 1 5901 value=6 prim::Constant pnnx_4600 0 1 5902 value=0 prim::Constant pnnx_4601 0 1 5903 value=1 prim::Constant pnnx_4602 0 1 5904 value=2 prim::Constant pnnx_4603 0 1 5905 value=3 prim::Constant pnnx_4604 0 1 5906 value=6 prim::Constant pnnx_4605 0 1 5907 value=4 prim::Constant pnnx_4606 0 1 5908 value=-2 prim::Constant pnnx_4607 0 1 5909 value=-1 prim::Constant pnnx_4608 0 1 5910 value=64 pnnx.Attribute layers_dfe.3.residual_group.blocks.4.attn 0 1 relative_position_bias_table.47 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.47=(225,6)f32 pnnx.Attribute layers_dfe.3.residual_group.blocks.4.attn 0 1 relative_position_index.47 @relative_position_index=(64,64)i64 #relative_position_index.47=(64,64)i64 Tensor.view Tensor.view_1221 2 1 5894 5895 x_windows.47 $input=5894 $shape=5895 #5894=(1,6,6,8,8,192)f32 #x_windows.47=(36,8,8,192)f32 Tensor.view Tensor.view_1222 2 1 x_windows.47 5897 x1.47 $input=x_windows.47 $shape=5897 #x_windows.47=(36,8,8,192)f32 #x1.47=(36,64,192)f32 aten::size pnnx_4609 2 1 x1.47 5902 5918 #x1.47=(36,64,192)f32 prim::NumToTensor pnnx_4610 1 1 5918 B_.47 aten::Int pnnx_4611 1 1 B_.47 5920 aten::Int pnnx_4612 1 1 B_.47 5921 aten::size pnnx_4613 2 1 x1.47 5903 5922 #x1.47=(36,64,192)f32 prim::NumToTensor pnnx_4614 1 1 5922 N.47 aten::Int pnnx_4615 1 1 N.47 5924 aten::Int pnnx_4616 1 1 N.47 5925 aten::size pnnx_4617 2 1 x1.47 5904 5926 #x1.47=(36,64,192)f32 prim::NumToTensor pnnx_4618 1 1 5926 C.101 aten::Int pnnx_4619 1 1 C.101 5928 nn.Linear layers_dfe.3.residual_group.blocks.4.attn.qkv 1 1 x1.47 5929 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.47=(36,64,192)f32 #5929=(36,64,576)f32 aten::div pnnx_4620 3 1 C.101 5901 5900 5930 aten::Int pnnx_4621 1 1 5930 5931 prim::ListConstruct pnnx_4622 5 1 5921 5925 5905 5906 5931 5932 prim::Constant pnnx_4624 0 1 21442 value=2 prim::Constant pnnx_4625 0 1 21443 value=0 prim::Constant pnnx_4626 0 1 21444 value=3 prim::Constant pnnx_4627 0 1 21445 value=1 prim::ListConstruct pnnx_4628 5 1 21442 21443 21444 21445 5907 5934 Tensor.reshape Tensor.reshape_478 2 1 5929 5932 5933 $input=5929 $shape=5932 #5929=(36,64,576)f32 #5933=(36,64,3,6,32)f32 prim::Constant pnnx_4630 0 1 21446 value=0 prim::Constant pnnx_4631 0 1 21447 
value=0 prim::Constant pnnx_4633 0 1 21448 value=0 prim::Constant pnnx_4634 0 1 21449 value=1 prim::Constant pnnx_4636 0 1 21450 value=0 prim::Constant pnnx_4637 0 1 21451 value=2 torch.permute torch.permute_2617 2 1 5933 5934 qkv0.47 $input=5933 $dims=5934 #5933=(36,64,3,6,32)f32 #qkv0.47=(3,36,6,64,32)f32 Tensor.select Tensor.select_716 3 1 qkv0.47 21446 21447 q.47 $input=qkv0.47 $dim=21446 $index=21447 #qkv0.47=(3,36,6,64,32)f32 #q.47=(36,6,64,32)f32 aten::mul pnnx_4639 2 1 q.47 5899 q0.47 #q.47=(36,6,64,32)f32 #q0.47=(36,6,64,32)f32 Tensor.select Tensor.select_717 3 1 qkv0.47 21448 21449 k.47 $input=qkv0.47 $dim=21448 $index=21449 #qkv0.47=(3,36,6,64,32)f32 #k.47=(36,6,64,32)f32 prim::Constant pnnx_4642 0 1 21452 value=-1 prim::ListConstruct pnnx_4643 1 1 21452 5942 Tensor.view Tensor.view_1223 2 1 relative_position_index.47 5942 5943 $input=relative_position_index.47 $shape=5942 #relative_position_index.47=(64,64)i64 #5943=(4096)i64 prim::ListConstruct pnnx_4645 1 1 5943 5944 #5943=(4096)i64 prim::Constant pnnx_4647 0 1 21453 value=64 prim::Constant pnnx_4648 0 1 21454 value=-1 prim::ListConstruct pnnx_4649 3 1 5910 21453 21454 5946 Tensor.index Tensor.index_348 2 1 relative_position_bias_table.47 5944 5945 $input=relative_position_bias_table.47 $expr=5944 #relative_position_bias_table.47=(225,6)f32 #5945=(4096,6)f32 prim::Constant pnnx_4651 0 1 21455 value=2 prim::Constant pnnx_4652 0 1 21456 value=0 prim::Constant pnnx_4653 0 1 21457 value=1 prim::ListConstruct pnnx_4654 3 1 21455 21456 21457 5948 Tensor.view Tensor.view_1224 2 1 5945 5946 relative_position_bias.47 $input=5945 $shape=5946 #5945=(4096,6)f32 #relative_position_bias.47=(64,64,6)f32 prim::Constant pnnx_4658 0 1 21459 value=0 torch.permute torch.permute_2618 2 1 relative_position_bias.47 5948 5949 $input=relative_position_bias.47 $dims=5948 #relative_position_bias.47=(64,64,6)f32 #5949=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_69 1 1 5949 relative_position_bias0.47 memory_format=torch.contiguous_format $input=5949 #5949=(6,64,64)f32 #relative_position_bias0.47=(6,64,64)f32 prim::Constant pnnx_4660 0 1 21460 value=1 torch.transpose torch.transpose_3011 3 1 k.47 5908 5909 5940 $input=k.47 $dim0=5908 $dim1=5909 #k.47=(36,6,64,32)f32 #5940=(36,6,32,64)f32 torch.matmul torch.matmul_2248 2 1 q0.47 5940 attn.95 $input=q0.47 $other=5940 #q0.47=(36,6,64,32)f32 #5940=(36,6,32,64)f32 #attn.95=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3262 2 1 relative_position_bias0.47 21459 5951 $input=relative_position_bias0.47 $dim=21459 #relative_position_bias0.47=(6,64,64)f32 #5951=(1,6,64,64)f32 aten::add pnnx_4661 3 1 attn.95 5951 21460 input.105 #attn.95=(36,6,64,64)f32 #5951=(1,6,64,64)f32 #input.105=(36,6,64,64)f32 nn.Softmax layers_dfe.3.residual_group.blocks.4.attn.softmax 1 1 input.105 5953 dim=-1 #input.105=(36,6,64,64)f32 #5953=(36,6,64,64)f32 nn.Dropout layers_dfe.3.residual_group.blocks.4.attn.attn_drop 1 1 5953 5954 #5953=(36,6,64,64)f32 #5954=(36,6,64,64)f32 Tensor.select Tensor.select_718 3 1 qkv0.47 21450 21451 v.47 $input=qkv0.47 $dim=21450 $index=21451 #qkv0.47=(3,36,6,64,32)f32 #v.47=(36,6,64,32)f32 prim::Constant pnnx_4663 0 1 21461 value=1 prim::Constant pnnx_4664 0 1 21462 value=2 torch.matmul torch.matmul_2249 2 1 5954 v.47 5955 $input=5954 $other=v.47 #5954=(36,6,64,64)f32 #v.47=(36,6,64,32)f32 #5955=(36,6,64,32)f32 prim::ListConstruct pnnx_4666 3 1 5920 5924 5928 5957 torch.transpose torch.transpose_3012 3 1 5955 21461 21462 5956 $input=5955 $dim0=21461 $dim1=21462 #5955=(36,6,64,32)f32 
#5956=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_479 2 1 5956 5957 input0.49 $input=5956 $shape=5957 #5956=(36,64,6,32)f32 #input0.49=(36,64,192)f32 nn.Linear layers_dfe.3.residual_group.blocks.4.attn.proj 1 1 input0.49 5959 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.49=(36,64,192)f32 #5959=(36,64,192)f32 nn.Dropout layers_dfe.3.residual_group.blocks.4.attn.proj_drop 1 1 5959 5960 #5959=(36,64,192)f32 #5960=(36,64,192)f32 prim::Constant pnnx_4668 0 1 21463 value=-1 prim::Constant pnnx_4669 0 1 21464 value=8 prim::Constant pnnx_4670 0 1 21465 value=8 prim::ListConstruct pnnx_4671 4 1 21463 21464 21465 5869 5961 prim::Constant pnnx_4673 0 1 21466 value=8 prim::Constant pnnx_4674 0 1 21467 value=trunc aten::div pnnx_4675 3 1 H.1 21466 21467 5963 aten::Int pnnx_4676 1 1 5963 5964 prim::Constant pnnx_4677 0 1 21468 value=8 prim::Constant pnnx_4678 0 1 21469 value=trunc aten::div pnnx_4679 3 1 W.1 21468 21469 5965 aten::Int pnnx_4680 1 1 5965 5966 prim::Constant pnnx_4681 0 1 21470 value=1 prim::Constant pnnx_4682 0 1 21471 value=8 prim::Constant pnnx_4683 0 1 21472 value=8 prim::Constant pnnx_4684 0 1 21473 value=-1 prim::ListConstruct pnnx_4685 6 1 21470 5964 5966 21471 21472 21473 5967 prim::Constant pnnx_4687 0 1 21474 value=0 prim::Constant pnnx_4688 0 1 21475 value=1 prim::Constant pnnx_4689 0 1 21476 value=3 prim::Constant pnnx_4690 0 1 21477 value=2 prim::Constant pnnx_4691 0 1 21478 value=4 prim::Constant pnnx_4692 0 1 21479 value=5 prim::ListConstruct pnnx_4693 6 1 21474 21475 21476 21477 21478 21479 5969 Tensor.view Tensor.view_1225 2 1 5960 5961 windows.47 $input=5960 $shape=5961 #5960=(36,64,192)f32 #windows.47=(36,8,8,192)f32 Tensor.view Tensor.view_1226 2 1 windows.47 5967 x2.47 $input=windows.47 $shape=5967 #windows.47=(36,8,8,192)f32 #x2.47=(1,6,6,8,8,192)f32 prim::Constant pnnx_4697 0 1 21481 value=1 prim::Constant pnnx_4698 0 1 21482 value=-1 prim::ListConstruct pnnx_4699 4 1 21481 253 493 21482 5972 torch.permute torch.permute_2619 2 1 x2.47 5969 5970 $input=x2.47 $dims=5969 #x2.47=(1,6,6,8,8,192)f32 #5970=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_70 1 1 5970 5971 memory_format=torch.contiguous_format $input=5970 #5970=(1,6,8,6,8,192)f32 #5971=(1,6,8,6,8,192)f32 aten::mul pnnx_4701 2 1 H.1 W.1 5974 aten::Int pnnx_4702 1 1 5974 5975 prim::ListConstruct pnnx_4703 3 1 5864 5975 5868 5976 prim::Constant pnnx_4705 0 1 5978 value=None prim::Constant pnnx_4706 0 1 21483 value=1 Tensor.view Tensor.view_1227 2 1 5971 5972 x3.47 $input=5971 $shape=5972 #5971=(1,6,8,6,8,192)f32 #x3.47=(1,48,48,192)f32 Tensor.view Tensor.view_1228 2 1 x3.47 5976 x4.47 $input=x3.47 $shape=5976 #x3.47=(1,48,48,192)f32 #x4.47=(1,2304,192)f32 aten::add pnnx_4707 3 1 5845 x4.47 21483 input.107 #5845=(1,2304,192)f32 #x4.47=(1,2304,192)f32 #input.107=(1,2304,192)f32 nn.LayerNorm layers_dfe.3.residual_group.blocks.4.norm2 1 1 input.107 5980 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.107=(1,2304,192)f32 #5980=(1,2304,192)f32 nn.Linear layers_dfe.3.residual_group.blocks.4.mlp.fc1 1 1 5980 5985 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #5980=(1,2304,192)f32 #5985=(1,2304,384)f32 nn.GELU layers_dfe.3.residual_group.blocks.4.mlp.act 1 1 5985 5986 #5985=(1,2304,384)f32 #5986=(1,2304,384)f32 nn.Dropout layers_dfe.3.residual_group.blocks.4.mlp.drop 1 1 5986 5987 #5986=(1,2304,384)f32 #5987=(1,2304,384)f32 nn.Linear layers_dfe.3.residual_group.blocks.4.mlp.fc2 1 1 
5987 5988 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #5987=(1,2304,384)f32 #5988=(1,2304,192)f32 nn.Dropout layers_dfe.3.residual_group.blocks.4.mlp.drop 1 1 5988 5989 #5988=(1,2304,192)f32 #5989=(1,2304,192)f32 prim::Constant pnnx_4708 0 1 5990 value=None prim::Constant pnnx_4709 0 1 21484 value=1 aten::add pnnx_4710 3 1 input.107 5989 21484 5991 #input.107=(1,2304,192)f32 #5989=(1,2304,192)f32 #5991=(1,2304,192)f32 prim::Constant pnnx_4711 0 1 5992 value=trunc prim::Constant pnnx_4712 0 1 5993 value=8 prim::Constant pnnx_4713 0 1 5994 value=0 prim::Constant pnnx_4714 0 1 5995 value=2 prim::Constant pnnx_4715 0 1 5996 value=-4 prim::Constant pnnx_4716 0 1 5997 value=1 prim::Constant pnnx_4717 0 1 5998 value=3 prim::Constant pnnx_4718 0 1 5999 value=8 prim::Constant pnnx_4719 0 1 6000 value=4 prim::Constant pnnx_4720 0 1 6001 value=5 prim::Constant pnnx_4721 0 1 6002 value=-1 prim::Constant pnnx_4722 0 1 6003 value=64 pnnx.Attribute layers_dfe.3.residual_group.blocks.5 0 1 attn_mask.25 @attn_mask=(36,64,64)f32 #attn_mask.25=(36,64,64)f32 aten::size pnnx_4723 2 1 5991 5994 6010 #5991=(1,2304,192)f32 prim::NumToTensor pnnx_4724 1 1 6010 B.57 aten::Int pnnx_4725 1 1 B.57 6012 aten::Int pnnx_4726 1 1 B.57 6013 aten::size pnnx_4727 2 1 5991 5995 6014 #5991=(1,2304,192)f32 prim::NumToTensor pnnx_4728 1 1 6014 C.103 aten::Int pnnx_4729 1 1 C.103 6016 aten::Int pnnx_4730 1 1 C.103 6017 aten::Int pnnx_4731 1 1 C.103 6018 aten::Int pnnx_4732 1 1 C.103 6019 nn.LayerNorm layers_dfe.3.residual_group.blocks.5.norm1 1 1 5991 6020 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #5991=(1,2304,192)f32 #6020=(1,2304,192)f32 prim::ListConstruct pnnx_4733 4 1 6013 250 490 6019 6021 prim::Constant pnnx_4735 0 1 21485 value=-4 prim::ListConstruct pnnx_4736 2 1 5996 21485 6023 prim::Constant pnnx_4737 0 1 21486 value=2 prim::ListConstruct pnnx_4738 2 1 5997 21486 6024 Tensor.view Tensor.view_1229 2 1 6020 6021 x.49 $input=6020 $shape=6021 #6020=(1,2304,192)f32 #x.49=(1,48,48,192)f32 prim::Constant pnnx_4740 0 1 21487 value=0 torch.roll torch.roll_2442 3 1 x.49 6023 6024 x0.49 $input=x.49 $shifts=6023 $dims=6024 #x.49=(1,48,48,192)f32 #x0.49=(1,48,48,192)f32 aten::size pnnx_4741 2 1 x0.49 21487 6026 #x0.49=(1,48,48,192)f32 prim::NumToTensor pnnx_4742 1 1 6026 B0.49 aten::Int pnnx_4743 1 1 B0.49 6028 prim::Constant pnnx_4744 0 1 21488 value=1 aten::size pnnx_4745 2 1 x0.49 21488 6029 #x0.49=(1,48,48,192)f32 prim::NumToTensor pnnx_4746 1 1 6029 6030 prim::Constant pnnx_4747 0 1 21489 value=2 aten::size pnnx_4748 2 1 x0.49 21489 6031 #x0.49=(1,48,48,192)f32 prim::NumToTensor pnnx_4749 1 1 6031 6032 aten::size pnnx_4750 2 1 x0.49 5998 6033 #x0.49=(1,48,48,192)f32 prim::NumToTensor pnnx_4751 1 1 6033 C0.49 aten::Int pnnx_4752 1 1 C0.49 6035 aten::Int pnnx_4753 1 1 C0.49 6036 aten::div pnnx_4754 3 1 6030 5993 5992 6037 aten::Int pnnx_4755 1 1 6037 6038 prim::Constant pnnx_4756 0 1 21490 value=8 prim::Constant pnnx_4757 0 1 21491 value=trunc aten::div pnnx_4758 3 1 6032 21490 21491 6039 aten::Int pnnx_4759 1 1 6039 6040 prim::Constant pnnx_4760 0 1 21492 value=8 prim::ListConstruct pnnx_4761 6 1 6028 6038 5999 6040 21492 6036 6041 prim::Constant pnnx_4763 0 1 21493 value=0 prim::Constant pnnx_4764 0 1 21494 value=1 prim::Constant pnnx_4765 0 1 21495 value=3 prim::Constant pnnx_4766 0 1 21496 value=2 prim::ListConstruct pnnx_4767 6 1 21493 21494 21495 21496 6000 6001 6043 Tensor.view Tensor.view_1230 2 1 x0.49 6041 x1.49 $input=x0.49 
$shape=6041 #x0.49=(1,48,48,192)f32 #x1.49=(1,6,8,6,8,192)f32 prim::Constant pnnx_4771 0 1 21498 value=8 prim::Constant pnnx_4772 0 1 21499 value=8 prim::ListConstruct pnnx_4773 4 1 6002 21498 21499 6035 6046 torch.permute torch.permute_2620 2 1 x1.49 6043 6044 $input=x1.49 $dims=6043 #x1.49=(1,6,8,6,8,192)f32 #6044=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_71 1 1 6044 6045 memory_format=torch.contiguous_format $input=6044 #6044=(1,6,6,8,8,192)f32 #6045=(1,6,6,8,8,192)f32 prim::Constant pnnx_4775 0 1 21500 value=-1 prim::ListConstruct pnnx_4776 3 1 21500 6003 6018 6048 prim::Constant pnnx_4778 0 1 6050 value=1.767767e-01 prim::Constant pnnx_4779 0 1 6051 value=trunc prim::Constant pnnx_4780 0 1 6052 value=6 prim::Constant pnnx_4781 0 1 6053 value=0 prim::Constant pnnx_4782 0 1 6054 value=1 prim::Constant pnnx_4783 0 1 6055 value=2 prim::Constant pnnx_4784 0 1 6056 value=3 prim::Constant pnnx_4785 0 1 6057 value=6 prim::Constant pnnx_4786 0 1 6058 value=4 prim::Constant pnnx_4787 0 1 6059 value=-2 prim::Constant pnnx_4788 0 1 6060 value=-1 prim::Constant pnnx_4789 0 1 6061 value=64 pnnx.Attribute layers_dfe.3.residual_group.blocks.5.attn 0 1 relative_position_bias_table.49 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.49=(225,6)f32 pnnx.Attribute layers_dfe.3.residual_group.blocks.5.attn 0 1 relative_position_index.49 @relative_position_index=(64,64)i64 #relative_position_index.49=(64,64)i64 Tensor.view Tensor.view_1231 2 1 6045 6046 x_windows.49 $input=6045 $shape=6046 #6045=(1,6,6,8,8,192)f32 #x_windows.49=(36,8,8,192)f32 Tensor.view Tensor.view_1232 2 1 x_windows.49 6048 x2.49 $input=x_windows.49 $shape=6048 #x_windows.49=(36,8,8,192)f32 #x2.49=(36,64,192)f32 aten::size pnnx_4790 2 1 x2.49 6053 6069 #x2.49=(36,64,192)f32 prim::NumToTensor pnnx_4791 1 1 6069 B_.49 aten::Int pnnx_4792 1 1 B_.49 6071 aten::Int pnnx_4793 1 1 B_.49 6072 aten::size pnnx_4794 2 1 x2.49 6054 6073 #x2.49=(36,64,192)f32 prim::NumToTensor pnnx_4795 1 1 6073 N.49 aten::Int pnnx_4796 1 1 N.49 6075 aten::Int pnnx_4797 1 1 N.49 6076 aten::Int pnnx_4798 1 1 N.49 6077 aten::Int pnnx_4799 1 1 N.49 6078 aten::Int pnnx_4800 1 1 N.49 6079 aten::Int pnnx_4801 1 1 N.49 6080 aten::size pnnx_4802 2 1 x2.49 6055 6081 #x2.49=(36,64,192)f32 prim::NumToTensor pnnx_4803 1 1 6081 C.105 aten::Int pnnx_4804 1 1 C.105 6083 nn.Linear layers_dfe.3.residual_group.blocks.5.attn.qkv 1 1 x2.49 6084 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.49=(36,64,192)f32 #6084=(36,64,576)f32 aten::div pnnx_4805 3 1 C.105 6052 6051 6085 aten::Int pnnx_4806 1 1 6085 6086 prim::ListConstruct pnnx_4807 5 1 6072 6080 6056 6057 6086 6087 prim::Constant pnnx_4809 0 1 21501 value=2 prim::Constant pnnx_4810 0 1 21502 value=0 prim::Constant pnnx_4811 0 1 21503 value=3 prim::Constant pnnx_4812 0 1 21504 value=1 prim::ListConstruct pnnx_4813 5 1 21501 21502 21503 21504 6058 6089 Tensor.reshape Tensor.reshape_480 2 1 6084 6087 6088 $input=6084 $shape=6087 #6084=(36,64,576)f32 #6088=(36,64,3,6,32)f32 prim::Constant pnnx_4815 0 1 21505 value=0 prim::Constant pnnx_4816 0 1 21506 value=0 prim::Constant pnnx_4818 0 1 21507 value=0 prim::Constant pnnx_4819 0 1 21508 value=1 prim::Constant pnnx_4821 0 1 21509 value=0 prim::Constant pnnx_4822 0 1 21510 value=2 torch.permute torch.permute_2621 2 1 6088 6089 qkv0.49 $input=6088 $dims=6089 #6088=(36,64,3,6,32)f32 #qkv0.49=(3,36,6,64,32)f32 Tensor.select Tensor.select_719 3 1 qkv0.49 21505 21506 q.49 $input=qkv0.49 $dim=21505 $index=21506 
#qkv0.49=(3,36,6,64,32)f32 #q.49=(36,6,64,32)f32 aten::mul pnnx_4824 2 1 q.49 6050 q0.49 #q.49=(36,6,64,32)f32 #q0.49=(36,6,64,32)f32 Tensor.select Tensor.select_720 3 1 qkv0.49 21507 21508 k.49 $input=qkv0.49 $dim=21507 $index=21508 #qkv0.49=(3,36,6,64,32)f32 #k.49=(36,6,64,32)f32 prim::Constant pnnx_4827 0 1 21511 value=-1 prim::ListConstruct pnnx_4828 1 1 21511 6097 Tensor.view Tensor.view_1233 2 1 relative_position_index.49 6097 6098 $input=relative_position_index.49 $shape=6097 #relative_position_index.49=(64,64)i64 #6098=(4096)i64 prim::ListConstruct pnnx_4830 1 1 6098 6099 #6098=(4096)i64 prim::Constant pnnx_4832 0 1 21512 value=64 prim::Constant pnnx_4833 0 1 21513 value=-1 prim::ListConstruct pnnx_4834 3 1 6061 21512 21513 6101 Tensor.index Tensor.index_349 2 1 relative_position_bias_table.49 6099 6100 $input=relative_position_bias_table.49 $expr=6099 #relative_position_bias_table.49=(225,6)f32 #6100=(4096,6)f32 prim::Constant pnnx_4836 0 1 21514 value=2 prim::Constant pnnx_4837 0 1 21515 value=0 prim::Constant pnnx_4838 0 1 21516 value=1 prim::ListConstruct pnnx_4839 3 1 21514 21515 21516 6103 Tensor.view Tensor.view_1234 2 1 6100 6101 relative_position_bias.49 $input=6100 $shape=6101 #6100=(4096,6)f32 #relative_position_bias.49=(64,64,6)f32 prim::Constant pnnx_4843 0 1 21518 value=0 torch.permute torch.permute_2622 2 1 relative_position_bias.49 6103 6104 $input=relative_position_bias.49 $dims=6103 #relative_position_bias.49=(64,64,6)f32 #6104=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_72 1 1 6104 relative_position_bias0.49 memory_format=torch.contiguous_format $input=6104 #6104=(6,64,64)f32 #relative_position_bias0.49=(6,64,64)f32 prim::Constant pnnx_4845 0 1 21519 value=1 torch.transpose torch.transpose_3013 3 1 k.49 6059 6060 6095 $input=k.49 $dim0=6059 $dim1=6060 #k.49=(36,6,64,32)f32 #6095=(36,6,32,64)f32 torch.matmul torch.matmul_2250 2 1 q0.49 6095 attn.99 $input=q0.49 $other=6095 #q0.49=(36,6,64,32)f32 #6095=(36,6,32,64)f32 #attn.99=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3263 2 1 relative_position_bias0.49 21518 6106 $input=relative_position_bias0.49 $dim=21518 #relative_position_bias0.49=(6,64,64)f32 #6106=(1,6,64,64)f32 aten::add pnnx_4846 3 1 attn.99 6106 21519 attn0.25 #attn.99=(36,6,64,64)f32 #6106=(1,6,64,64)f32 #attn0.25=(36,6,64,64)f32 prim::Constant pnnx_4847 0 1 21520 value=0 aten::size pnnx_4848 2 1 attn_mask.25 21520 6108 #attn_mask.25=(36,64,64)f32 prim::NumToTensor pnnx_4849 1 1 6108 other.25 aten::Int pnnx_4850 1 1 other.25 6110 prim::Constant pnnx_4851 0 1 21521 value=trunc aten::div pnnx_4852 3 1 B_.49 other.25 21521 6111 aten::Int pnnx_4853 1 1 6111 6112 prim::Constant pnnx_4854 0 1 21522 value=6 prim::ListConstruct pnnx_4855 5 1 6112 6110 21522 6079 6078 6113 prim::Constant pnnx_4857 0 1 21523 value=1 prim::Constant pnnx_4859 0 1 21524 value=0 prim::Constant pnnx_4861 0 1 21525 value=1 Tensor.view Tensor.view_1235 2 1 attn0.25 6113 6114 $input=attn0.25 $shape=6113 #attn0.25=(36,6,64,64)f32 #6114=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3264 2 1 attn_mask.25 21523 6115 $input=attn_mask.25 $dim=21523 #attn_mask.25=(36,64,64)f32 #6115=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3265 2 1 6115 21524 6116 $input=6115 $dim=21524 #6115=(36,1,64,64)f32 #6116=(1,36,1,64,64)f32 aten::add pnnx_4862 3 1 6114 6116 21525 attn1.25 #6114=(1,36,6,64,64)f32 #6116=(1,36,1,64,64)f32 #attn1.25=(1,36,6,64,64)f32 prim::Constant pnnx_4863 0 1 21526 value=-1 prim::Constant pnnx_4864 0 1 21527 value=6 prim::ListConstruct pnnx_4865 4 1 21526 21527 6077 
6076 6118 Tensor.view Tensor.view_1236 2 1 attn1.25 6118 input.109 $input=attn1.25 $shape=6118 #attn1.25=(1,36,6,64,64)f32 #input.109=(36,6,64,64)f32 nn.Softmax layers_dfe.3.residual_group.blocks.5.attn.softmax 1 1 input.109 6120 dim=-1 #input.109=(36,6,64,64)f32 #6120=(36,6,64,64)f32 nn.Dropout layers_dfe.3.residual_group.blocks.5.attn.attn_drop 1 1 6120 6121 #6120=(36,6,64,64)f32 #6121=(36,6,64,64)f32 Tensor.select Tensor.select_721 3 1 qkv0.49 21509 21510 v.49 $input=qkv0.49 $dim=21509 $index=21510 #qkv0.49=(3,36,6,64,32)f32 #v.49=(36,6,64,32)f32 prim::Constant pnnx_4868 0 1 21528 value=1 prim::Constant pnnx_4869 0 1 21529 value=2 torch.matmul torch.matmul_2251 2 1 6121 v.49 6122 $input=6121 $other=v.49 #6121=(36,6,64,64)f32 #v.49=(36,6,64,32)f32 #6122=(36,6,64,32)f32 prim::ListConstruct pnnx_4871 3 1 6071 6075 6083 6124 torch.transpose torch.transpose_3014 3 1 6122 21528 21529 6123 $input=6122 $dim0=21528 $dim1=21529 #6122=(36,6,64,32)f32 #6123=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_481 2 1 6123 6124 input0.51 $input=6123 $shape=6124 #6123=(36,64,6,32)f32 #input0.51=(36,64,192)f32 nn.Linear layers_dfe.3.residual_group.blocks.5.attn.proj 1 1 input0.51 6126 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.51=(36,64,192)f32 #6126=(36,64,192)f32 nn.Dropout layers_dfe.3.residual_group.blocks.5.attn.proj_drop 1 1 6126 6127 #6126=(36,64,192)f32 #6127=(36,64,192)f32 prim::Constant pnnx_4873 0 1 21530 value=-1 prim::Constant pnnx_4874 0 1 21531 value=8 prim::Constant pnnx_4875 0 1 21532 value=8 prim::ListConstruct pnnx_4876 4 1 21530 21531 21532 6017 6128 prim::Constant pnnx_4878 0 1 21533 value=8 prim::Constant pnnx_4879 0 1 21534 value=trunc aten::div pnnx_4880 3 1 H.1 21533 21534 6130 aten::Int pnnx_4881 1 1 6130 6131 prim::Constant pnnx_4882 0 1 21535 value=8 prim::Constant pnnx_4883 0 1 21536 value=trunc aten::div pnnx_4884 3 1 W.1 21535 21536 6132 aten::Int pnnx_4885 1 1 6132 6133 prim::Constant pnnx_4886 0 1 21537 value=1 prim::Constant pnnx_4887 0 1 21538 value=8 prim::Constant pnnx_4888 0 1 21539 value=8 prim::Constant pnnx_4889 0 1 21540 value=-1 prim::ListConstruct pnnx_4890 6 1 21537 6131 6133 21538 21539 21540 6134 prim::Constant pnnx_4892 0 1 21541 value=0 prim::Constant pnnx_4893 0 1 21542 value=1 prim::Constant pnnx_4894 0 1 21543 value=3 prim::Constant pnnx_4895 0 1 21544 value=2 prim::Constant pnnx_4896 0 1 21545 value=4 prim::Constant pnnx_4897 0 1 21546 value=5 prim::ListConstruct pnnx_4898 6 1 21541 21542 21543 21544 21545 21546 6136 Tensor.view Tensor.view_1237 2 1 6127 6128 windows.49 $input=6127 $shape=6128 #6127=(36,64,192)f32 #windows.49=(36,8,8,192)f32 Tensor.view Tensor.view_1238 2 1 windows.49 6134 x3.49 $input=windows.49 $shape=6134 #windows.49=(36,8,8,192)f32 #x3.49=(1,6,6,8,8,192)f32 prim::Constant pnnx_4902 0 1 21548 value=1 prim::Constant pnnx_4903 0 1 21549 value=-1 prim::ListConstruct pnnx_4904 4 1 21548 247 487 21549 6139 torch.permute torch.permute_2623 2 1 x3.49 6136 6137 $input=x3.49 $dims=6136 #x3.49=(1,6,6,8,8,192)f32 #6137=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_73 1 1 6137 6138 memory_format=torch.contiguous_format $input=6137 #6137=(1,6,8,6,8,192)f32 #6138=(1,6,8,6,8,192)f32 prim::Constant pnnx_4906 0 1 21550 value=4 prim::Constant pnnx_4907 0 1 21551 value=4 prim::ListConstruct pnnx_4908 2 1 21550 21551 6141 prim::Constant pnnx_4909 0 1 21552 value=1 prim::Constant pnnx_4910 0 1 21553 value=2 prim::ListConstruct pnnx_4911 2 1 21552 21553 6142 Tensor.view Tensor.view_1239 2 1 6138 6139 
shifted_x.25 $input=6138 $shape=6139 #6138=(1,6,8,6,8,192)f32 #shifted_x.25=(1,48,48,192)f32 aten::mul pnnx_4913 2 1 H.1 W.1 6144 aten::Int pnnx_4914 1 1 6144 6145 prim::ListConstruct pnnx_4915 3 1 6012 6145 6016 6146 prim::Constant pnnx_4917 0 1 6148 value=None prim::Constant pnnx_4918 0 1 21554 value=1 torch.roll torch.roll_2443 3 1 shifted_x.25 6141 6142 x4.49 $input=shifted_x.25 $shifts=6141 $dims=6142 #shifted_x.25=(1,48,48,192)f32 #x4.49=(1,48,48,192)f32 Tensor.view Tensor.view_1240 2 1 x4.49 6146 x5.25 $input=x4.49 $shape=6146 #x4.49=(1,48,48,192)f32 #x5.25=(1,2304,192)f32 aten::add pnnx_4919 3 1 5991 x5.25 21554 input.111 #5991=(1,2304,192)f32 #x5.25=(1,2304,192)f32 #input.111=(1,2304,192)f32 nn.LayerNorm layers_dfe.3.residual_group.blocks.5.norm2 1 1 input.111 6150 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.111=(1,2304,192)f32 #6150=(1,2304,192)f32 nn.Linear layers_dfe.3.residual_group.blocks.5.mlp.fc1 1 1 6150 6155 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #6150=(1,2304,192)f32 #6155=(1,2304,384)f32 nn.GELU layers_dfe.3.residual_group.blocks.5.mlp.act 1 1 6155 6156 #6155=(1,2304,384)f32 #6156=(1,2304,384)f32 nn.Dropout layers_dfe.3.residual_group.blocks.5.mlp.drop 1 1 6156 6157 #6156=(1,2304,384)f32 #6157=(1,2304,384)f32 nn.Linear layers_dfe.3.residual_group.blocks.5.mlp.fc2 1 1 6157 6158 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #6157=(1,2304,384)f32 #6158=(1,2304,192)f32 nn.Dropout layers_dfe.3.residual_group.blocks.5.mlp.drop 1 1 6158 6159 #6158=(1,2304,192)f32 #6159=(1,2304,192)f32 prim::Constant pnnx_4920 0 1 6160 value=None prim::Constant pnnx_4921 0 1 21555 value=1 aten::add pnnx_4922 3 1 input.111 6159 21555 6161 #input.111=(1,2304,192)f32 #6159=(1,2304,192)f32 #6161=(1,2304,192)f32 prim::Constant pnnx_4923 0 1 6162 value=0 prim::Constant pnnx_4924 0 1 6163 value=1 prim::Constant pnnx_4925 0 1 6164 value=2 prim::Constant pnnx_4926 0 1 6165 value=192 aten::size pnnx_4927 2 1 6161 6162 6166 #6161=(1,2304,192)f32 prim::NumToTensor pnnx_4928 1 1 6166 B.59 aten::Int pnnx_4929 1 1 B.59 6168 prim::ListConstruct pnnx_4931 4 1 6168 6165 244 484 6170 torch.transpose torch.transpose_3015 3 1 6161 6163 6164 6169 $input=6161 $dim0=6163 $dim1=6164 #6161=(1,2304,192)f32 #6169=(1,192,2304)f32 Tensor.view Tensor.view_1241 2 1 6169 6170 input.113 $input=6169 $shape=6170 #6169=(1,192,2304)f32 #input.113=(1,192,48,48)f32 nn.Conv2d layers_dfe.3.conv 1 1 input.113 6172 bias=True dilation=(1,1) groups=1 in_channels=192 kernel_size=(3,3) out_channels=192 padding=(1,1) padding_mode=zeros stride=(1,1) @bias=(192)f32 @weight=(192,192,3,3)f32 #input.113=(1,192,48,48)f32 #6172=(1,192,48,48)f32 prim::Constant pnnx_4933 0 1 6173 value=-1 prim::Constant pnnx_4934 0 1 6174 value=2 prim::Constant pnnx_4935 0 1 6175 value=1 prim::Constant pnnx_4937 0 1 21556 value=2 torch.flatten torch.flatten_2187 3 1 6172 6174 6173 6176 $input=6172 $start_dim=6174 $end_dim=6173 #6172=(1,192,48,48)f32 #6176=(1,192,2304)f32 torch.transpose torch.transpose_3016 3 1 6176 6175 21556 6177 $input=6176 $dim0=6175 $dim1=21556 #6176=(1,192,2304)f32 #6177=(1,2304,192)f32 aten::add pnnx_4939 3 1 6177 5196 5197 6178 #6177=(1,2304,192)f32 #5196=(1,2304,192)f32 #6178=(1,2304,192)f32 prim::Constant pnnx_4940 0 1 6179 value=1 prim::Constant pnnx_4941 0 1 6196 value=trunc prim::Constant pnnx_4942 0 1 6197 value=8 prim::Constant pnnx_4943 0 1 6198 value=0 prim::Constant pnnx_4944 0 1 6199 value=2 
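Each residual group in this trace closes the way layers_dfe.3 does above (torch.transpose_3015, Tensor.view_1241, layers_dfe.3.conv, torch.flatten_2187, torch.transpose_3016, then aten::add): the (1,2304,192) token sequence is reshaped back into a (1,192,48,48) map, passed through a 3x3 same-padded convolution, flattened back to tokens, and added to the tensor the group started from (blob 5196 here). A short sketch of that seq -> image -> conv -> seq round trip, with illustrative names:

import torch
import torch.nn as nn

conv = nn.Conv2d(192, 192, kernel_size=3, padding=1)       # matches layers_dfe.3.conv

def residual_group_tail(tokens, group_input, H=48, W=48):
    # tokens, group_input: (B, H*W, 192); group_input is the blob the trailing add brings back in
    B, L, C = tokens.shape
    x = tokens.transpose(1, 2).view(B, C, H, W)             # (1, 192, 48, 48), transpose + view
    x = conv(x)                                             # 3x3 conv, padding=(1,1)
    x = x.flatten(2).transpose(1, 2)                        # back to (1, 2304, 192)
    return x + group_input                                  # the trailing aten::add

out = residual_group_tail(torch.randn(1, 2304, 192), torch.randn(1, 2304, 192))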
prim::Constant pnnx_4945 0 1 6200 value=1 prim::Constant pnnx_4946 0 1 6201 value=3 prim::Constant pnnx_4947 0 1 6202 value=8 prim::Constant pnnx_4948 0 1 6203 value=4 prim::Constant pnnx_4949 0 1 6204 value=5 prim::Constant pnnx_4950 0 1 6205 value=-1 prim::Constant pnnx_4951 0 1 6206 value=64 aten::size pnnx_4952 2 1 6178 6198 6212 #6178=(1,2304,192)f32 prim::NumToTensor pnnx_4953 1 1 6212 B.61 aten::Int pnnx_4954 1 1 B.61 6214 aten::Int pnnx_4955 1 1 B.61 6215 aten::size pnnx_4956 2 1 6178 6199 6216 #6178=(1,2304,192)f32 prim::NumToTensor pnnx_4957 1 1 6216 C.107 aten::Int pnnx_4958 1 1 C.107 6218 aten::Int pnnx_4959 1 1 C.107 6219 aten::Int pnnx_4960 1 1 C.107 6220 aten::Int pnnx_4961 1 1 C.107 6221 nn.LayerNorm layers_dfe.4.residual_group.blocks.0.norm1 1 1 6178 6222 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #6178=(1,2304,192)f32 #6222=(1,2304,192)f32 prim::ListConstruct pnnx_4962 4 1 6215 241 481 6221 6223 prim::Constant pnnx_4964 0 1 21557 value=0 Tensor.view Tensor.view_1242 2 1 6222 6223 x.51 $input=6222 $shape=6223 #6222=(1,2304,192)f32 #x.51=(1,48,48,192)f32 aten::size pnnx_4965 2 1 x.51 21557 6225 #x.51=(1,48,48,192)f32 prim::NumToTensor pnnx_4966 1 1 6225 B0.51 aten::Int pnnx_4967 1 1 B0.51 6227 aten::size pnnx_4968 2 1 x.51 6200 6228 #x.51=(1,48,48,192)f32 prim::NumToTensor pnnx_4969 1 1 6228 6229 prim::Constant pnnx_4970 0 1 21558 value=2 aten::size pnnx_4971 2 1 x.51 21558 6230 #x.51=(1,48,48,192)f32 prim::NumToTensor pnnx_4972 1 1 6230 6231 aten::size pnnx_4973 2 1 x.51 6201 6232 #x.51=(1,48,48,192)f32 prim::NumToTensor pnnx_4974 1 1 6232 C0.51 aten::Int pnnx_4975 1 1 C0.51 6234 aten::Int pnnx_4976 1 1 C0.51 6235 aten::div pnnx_4977 3 1 6229 6197 6196 6236 aten::Int pnnx_4978 1 1 6236 6237 prim::Constant pnnx_4979 0 1 21559 value=8 prim::Constant pnnx_4980 0 1 21560 value=trunc aten::div pnnx_4981 3 1 6231 21559 21560 6238 aten::Int pnnx_4982 1 1 6238 6239 prim::Constant pnnx_4983 0 1 21561 value=8 prim::ListConstruct pnnx_4984 6 1 6227 6237 6202 6239 21561 6235 6240 prim::Constant pnnx_4986 0 1 21562 value=0 prim::Constant pnnx_4987 0 1 21563 value=1 prim::Constant pnnx_4988 0 1 21564 value=3 prim::Constant pnnx_4989 0 1 21565 value=2 prim::ListConstruct pnnx_4990 6 1 21562 21563 21564 21565 6203 6204 6242 Tensor.view Tensor.view_1243 2 1 x.51 6240 x0.51 $input=x.51 $shape=6240 #x.51=(1,48,48,192)f32 #x0.51=(1,6,8,6,8,192)f32 prim::Constant pnnx_4994 0 1 21567 value=8 prim::Constant pnnx_4995 0 1 21568 value=8 prim::ListConstruct pnnx_4996 4 1 6205 21567 21568 6234 6245 torch.permute torch.permute_2624 2 1 x0.51 6242 6243 $input=x0.51 $dims=6242 #x0.51=(1,6,8,6,8,192)f32 #6243=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_74 1 1 6243 6244 memory_format=torch.contiguous_format $input=6243 #6243=(1,6,6,8,8,192)f32 #6244=(1,6,6,8,8,192)f32 prim::Constant pnnx_4998 0 1 21569 value=-1 prim::ListConstruct pnnx_4999 3 1 21569 6206 6220 6247 prim::Constant pnnx_5001 0 1 6249 value=1.767767e-01 prim::Constant pnnx_5002 0 1 6250 value=trunc prim::Constant pnnx_5003 0 1 6251 value=6 prim::Constant pnnx_5004 0 1 6252 value=0 prim::Constant pnnx_5005 0 1 6253 value=1 prim::Constant pnnx_5006 0 1 6254 value=2 prim::Constant pnnx_5007 0 1 6255 value=3 prim::Constant pnnx_5008 0 1 6256 value=6 prim::Constant pnnx_5009 0 1 6257 value=4 prim::Constant pnnx_5010 0 1 6258 value=-2 prim::Constant pnnx_5011 0 1 6259 value=-1 prim::Constant pnnx_5012 0 1 6260 value=64 pnnx.Attribute layers_dfe.4.residual_group.blocks.0.attn 0 1 
relative_position_bias_table.51 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.51=(225,6)f32 pnnx.Attribute layers_dfe.4.residual_group.blocks.0.attn 0 1 relative_position_index.51 @relative_position_index=(64,64)i64 #relative_position_index.51=(64,64)i64 Tensor.view Tensor.view_1244 2 1 6244 6245 x_windows.51 $input=6244 $shape=6245 #6244=(1,6,6,8,8,192)f32 #x_windows.51=(36,8,8,192)f32 Tensor.view Tensor.view_1245 2 1 x_windows.51 6247 x1.51 $input=x_windows.51 $shape=6247 #x_windows.51=(36,8,8,192)f32 #x1.51=(36,64,192)f32 aten::size pnnx_5013 2 1 x1.51 6252 6268 #x1.51=(36,64,192)f32 prim::NumToTensor pnnx_5014 1 1 6268 B_.51 aten::Int pnnx_5015 1 1 B_.51 6270 aten::Int pnnx_5016 1 1 B_.51 6271 aten::size pnnx_5017 2 1 x1.51 6253 6272 #x1.51=(36,64,192)f32 prim::NumToTensor pnnx_5018 1 1 6272 N.51 aten::Int pnnx_5019 1 1 N.51 6274 aten::Int pnnx_5020 1 1 N.51 6275 aten::size pnnx_5021 2 1 x1.51 6254 6276 #x1.51=(36,64,192)f32 prim::NumToTensor pnnx_5022 1 1 6276 C.109 aten::Int pnnx_5023 1 1 C.109 6278 nn.Linear layers_dfe.4.residual_group.blocks.0.attn.qkv 1 1 x1.51 6279 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.51=(36,64,192)f32 #6279=(36,64,576)f32 aten::div pnnx_5024 3 1 C.109 6251 6250 6280 aten::Int pnnx_5025 1 1 6280 6281 prim::ListConstruct pnnx_5026 5 1 6271 6275 6255 6256 6281 6282 prim::Constant pnnx_5028 0 1 21570 value=2 prim::Constant pnnx_5029 0 1 21571 value=0 prim::Constant pnnx_5030 0 1 21572 value=3 prim::Constant pnnx_5031 0 1 21573 value=1 prim::ListConstruct pnnx_5032 5 1 21570 21571 21572 21573 6257 6284 Tensor.reshape Tensor.reshape_482 2 1 6279 6282 6283 $input=6279 $shape=6282 #6279=(36,64,576)f32 #6283=(36,64,3,6,32)f32 prim::Constant pnnx_5034 0 1 21574 value=0 prim::Constant pnnx_5035 0 1 21575 value=0 prim::Constant pnnx_5037 0 1 21576 value=0 prim::Constant pnnx_5038 0 1 21577 value=1 prim::Constant pnnx_5040 0 1 21578 value=0 prim::Constant pnnx_5041 0 1 21579 value=2 torch.permute torch.permute_2625 2 1 6283 6284 qkv0.51 $input=6283 $dims=6284 #6283=(36,64,3,6,32)f32 #qkv0.51=(3,36,6,64,32)f32 Tensor.select Tensor.select_722 3 1 qkv0.51 21574 21575 q.51 $input=qkv0.51 $dim=21574 $index=21575 #qkv0.51=(3,36,6,64,32)f32 #q.51=(36,6,64,32)f32 aten::mul pnnx_5043 2 1 q.51 6249 q0.51 #q.51=(36,6,64,32)f32 #q0.51=(36,6,64,32)f32 Tensor.select Tensor.select_723 3 1 qkv0.51 21576 21577 k.51 $input=qkv0.51 $dim=21576 $index=21577 #qkv0.51=(3,36,6,64,32)f32 #k.51=(36,6,64,32)f32 prim::Constant pnnx_5046 0 1 21580 value=-1 prim::ListConstruct pnnx_5047 1 1 21580 6292 Tensor.view Tensor.view_1246 2 1 relative_position_index.51 6292 6293 $input=relative_position_index.51 $shape=6292 #relative_position_index.51=(64,64)i64 #6293=(4096)i64 prim::ListConstruct pnnx_5049 1 1 6293 6294 #6293=(4096)i64 prim::Constant pnnx_5051 0 1 21581 value=64 prim::Constant pnnx_5052 0 1 21582 value=-1 prim::ListConstruct pnnx_5053 3 1 6260 21581 21582 6296 Tensor.index Tensor.index_350 2 1 relative_position_bias_table.51 6294 6295 $input=relative_position_bias_table.51 $expr=6294 #relative_position_bias_table.51=(225,6)f32 #6295=(4096,6)f32 prim::Constant pnnx_5055 0 1 21583 value=2 prim::Constant pnnx_5056 0 1 21584 value=0 prim::Constant pnnx_5057 0 1 21585 value=1 prim::ListConstruct pnnx_5058 3 1 21583 21584 21585 6298 Tensor.view Tensor.view_1247 2 1 6295 6296 relative_position_bias.51 $input=6295 $shape=6296 #6295=(4096,6)f32 #relative_position_bias.51=(64,64,6)f32 prim::Constant pnnx_5062 0 1 21587 value=0 
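
The view/permute/view chain feeding blocks.0.attn (Tensor.view_1243, torch.permute_2624, Tensor.view_1244/1245) is the usual Swin window partition with window size 8 on the 48x48 map, and the qkv Linear plus Tensor.reshape_482 / torch.permute_2625 / Tensor.select_722/723 split the result into per-head q, k, v with head_dim 32 (the 1.767767e-01 constant is 1/sqrt(32)). A minimal sketch; window_partition is an assumed helper name and the weights are random:

import torch
import torch.nn as nn

def window_partition(x: torch.Tensor, window_size: int) -> torch.Tensor:
    # (B, H, W, C) -> (num_windows*B, window_size*window_size, C)
    B, H, W, C = x.shape
    x = x.view(B, H // window_size, window_size, W // window_size, window_size, C)
    x = x.permute(0, 1, 3, 2, 4, 5).contiguous()      # torch.permute_2624
    return x.view(-1, window_size * window_size, C)

C, num_heads, head_dim = 192, 6, 32
x = torch.randn(1, 48, 48, C)                         # x.51 in the graph
win = window_partition(x, 8)                          # (36, 64, 192), x1.51

qkv_proj = nn.Linear(C, 3 * C)                        # blocks.*.attn.qkv
B_, N, _ = win.shape
qkv = qkv_proj(win).reshape(B_, N, 3, num_heads, head_dim).permute(2, 0, 3, 1, 4)
q, k, v = qkv[0], qkv[1], qkv[2]                      # each (36, 6, 64, 32)
q = q * head_dim ** -0.5                              # the 1.767767e-01 scale
print(q.shape, k.shape, v.shape)
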
torch.permute torch.permute_2626 2 1 relative_position_bias.51 6298 6299 $input=relative_position_bias.51 $dims=6298 #relative_position_bias.51=(64,64,6)f32 #6299=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_75 1 1 6299 relative_position_bias0.51 memory_format=torch.contiguous_format $input=6299 #6299=(6,64,64)f32 #relative_position_bias0.51=(6,64,64)f32 prim::Constant pnnx_5064 0 1 21588 value=1 torch.transpose torch.transpose_3017 3 1 k.51 6258 6259 6290 $input=k.51 $dim0=6258 $dim1=6259 #k.51=(36,6,64,32)f32 #6290=(36,6,32,64)f32 torch.matmul torch.matmul_2252 2 1 q0.51 6290 attn.103 $input=q0.51 $other=6290 #q0.51=(36,6,64,32)f32 #6290=(36,6,32,64)f32 #attn.103=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3266 2 1 relative_position_bias0.51 21587 6301 $input=relative_position_bias0.51 $dim=21587 #relative_position_bias0.51=(6,64,64)f32 #6301=(1,6,64,64)f32 aten::add pnnx_5065 3 1 attn.103 6301 21588 input.115 #attn.103=(36,6,64,64)f32 #6301=(1,6,64,64)f32 #input.115=(36,6,64,64)f32 nn.Softmax layers_dfe.4.residual_group.blocks.0.attn.softmax 1 1 input.115 6303 dim=-1 #input.115=(36,6,64,64)f32 #6303=(36,6,64,64)f32 nn.Dropout layers_dfe.4.residual_group.blocks.0.attn.attn_drop 1 1 6303 6304 #6303=(36,6,64,64)f32 #6304=(36,6,64,64)f32 Tensor.select Tensor.select_724 3 1 qkv0.51 21578 21579 v.51 $input=qkv0.51 $dim=21578 $index=21579 #qkv0.51=(3,36,6,64,32)f32 #v.51=(36,6,64,32)f32 prim::Constant pnnx_5067 0 1 21589 value=1 prim::Constant pnnx_5068 0 1 21590 value=2 torch.matmul torch.matmul_2253 2 1 6304 v.51 6305 $input=6304 $other=v.51 #6304=(36,6,64,64)f32 #v.51=(36,6,64,32)f32 #6305=(36,6,64,32)f32 prim::ListConstruct pnnx_5070 3 1 6270 6274 6278 6307 torch.transpose torch.transpose_3018 3 1 6305 21589 21590 6306 $input=6305 $dim0=21589 $dim1=21590 #6305=(36,6,64,32)f32 #6306=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_483 2 1 6306 6307 input0.53 $input=6306 $shape=6307 #6306=(36,64,6,32)f32 #input0.53=(36,64,192)f32 nn.Linear layers_dfe.4.residual_group.blocks.0.attn.proj 1 1 input0.53 6309 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.53=(36,64,192)f32 #6309=(36,64,192)f32 nn.Dropout layers_dfe.4.residual_group.blocks.0.attn.proj_drop 1 1 6309 6310 #6309=(36,64,192)f32 #6310=(36,64,192)f32 prim::Constant pnnx_5072 0 1 21591 value=-1 prim::Constant pnnx_5073 0 1 21592 value=8 prim::Constant pnnx_5074 0 1 21593 value=8 prim::ListConstruct pnnx_5075 4 1 21591 21592 21593 6219 6311 prim::Constant pnnx_5077 0 1 21594 value=8 prim::Constant pnnx_5078 0 1 21595 value=trunc aten::div pnnx_5079 3 1 H.1 21594 21595 6313 aten::Int pnnx_5080 1 1 6313 6314 prim::Constant pnnx_5081 0 1 21596 value=8 prim::Constant pnnx_5082 0 1 21597 value=trunc aten::div pnnx_5083 3 1 W.1 21596 21597 6315 aten::Int pnnx_5084 1 1 6315 6316 prim::Constant pnnx_5085 0 1 21598 value=1 prim::Constant pnnx_5086 0 1 21599 value=8 prim::Constant pnnx_5087 0 1 21600 value=8 prim::Constant pnnx_5088 0 1 21601 value=-1 prim::ListConstruct pnnx_5089 6 1 21598 6314 6316 21599 21600 21601 6317 prim::Constant pnnx_5091 0 1 21602 value=0 prim::Constant pnnx_5092 0 1 21603 value=1 prim::Constant pnnx_5093 0 1 21604 value=3 prim::Constant pnnx_5094 0 1 21605 value=2 prim::Constant pnnx_5095 0 1 21606 value=4 prim::Constant pnnx_5096 0 1 21607 value=5 prim::ListConstruct pnnx_5097 6 1 21602 21603 21604 21605 21606 21607 6319 Tensor.view Tensor.view_1248 2 1 6310 6311 windows.51 $input=6310 $shape=6311 #6310=(36,64,192)f32 #windows.51=(36,8,8,192)f32 Tensor.view Tensor.view_1249 2 
1 windows.51 6317 x2.51 $input=windows.51 $shape=6317 #windows.51=(36,8,8,192)f32 #x2.51=(1,6,6,8,8,192)f32 prim::Constant pnnx_5101 0 1 21609 value=1 prim::Constant pnnx_5102 0 1 21610 value=-1 prim::ListConstruct pnnx_5103 4 1 21609 238 478 21610 6322 torch.permute torch.permute_2627 2 1 x2.51 6319 6320 $input=x2.51 $dims=6319 #x2.51=(1,6,6,8,8,192)f32 #6320=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_76 1 1 6320 6321 memory_format=torch.contiguous_format $input=6320 #6320=(1,6,8,6,8,192)f32 #6321=(1,6,8,6,8,192)f32 aten::mul pnnx_5105 2 1 H.1 W.1 6324 aten::Int pnnx_5106 1 1 6324 6325 prim::ListConstruct pnnx_5107 3 1 6214 6325 6218 6326 prim::Constant pnnx_5109 0 1 6328 value=None prim::Constant pnnx_5110 0 1 21611 value=1 Tensor.view Tensor.view_1250 2 1 6321 6322 x3.51 $input=6321 $shape=6322 #6321=(1,6,8,6,8,192)f32 #x3.51=(1,48,48,192)f32 Tensor.view Tensor.view_1251 2 1 x3.51 6326 x4.51 $input=x3.51 $shape=6326 #x3.51=(1,48,48,192)f32 #x4.51=(1,2304,192)f32 aten::add pnnx_5111 3 1 6178 x4.51 21611 input.117 #6178=(1,2304,192)f32 #x4.51=(1,2304,192)f32 #input.117=(1,2304,192)f32 nn.LayerNorm layers_dfe.4.residual_group.blocks.0.norm2 1 1 input.117 6330 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.117=(1,2304,192)f32 #6330=(1,2304,192)f32 nn.Linear layers_dfe.4.residual_group.blocks.0.mlp.fc1 1 1 6330 6335 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #6330=(1,2304,192)f32 #6335=(1,2304,384)f32 nn.GELU layers_dfe.4.residual_group.blocks.0.mlp.act 1 1 6335 6336 #6335=(1,2304,384)f32 #6336=(1,2304,384)f32 nn.Dropout layers_dfe.4.residual_group.blocks.0.mlp.drop 1 1 6336 6337 #6336=(1,2304,384)f32 #6337=(1,2304,384)f32 nn.Linear layers_dfe.4.residual_group.blocks.0.mlp.fc2 1 1 6337 6338 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #6337=(1,2304,384)f32 #6338=(1,2304,192)f32 nn.Dropout layers_dfe.4.residual_group.blocks.0.mlp.drop 1 1 6338 6339 #6338=(1,2304,192)f32 #6339=(1,2304,192)f32 prim::Constant pnnx_5112 0 1 6340 value=None prim::Constant pnnx_5113 0 1 21612 value=1 aten::add pnnx_5114 3 1 input.117 6339 21612 6341 #input.117=(1,2304,192)f32 #6339=(1,2304,192)f32 #6341=(1,2304,192)f32 prim::Constant pnnx_5115 0 1 6342 value=trunc prim::Constant pnnx_5116 0 1 6343 value=8 prim::Constant pnnx_5117 0 1 6344 value=0 prim::Constant pnnx_5118 0 1 6345 value=2 prim::Constant pnnx_5119 0 1 6346 value=-4 prim::Constant pnnx_5120 0 1 6347 value=1 prim::Constant pnnx_5121 0 1 6348 value=3 prim::Constant pnnx_5122 0 1 6349 value=8 prim::Constant pnnx_5123 0 1 6350 value=4 prim::Constant pnnx_5124 0 1 6351 value=5 prim::Constant pnnx_5125 0 1 6352 value=-1 prim::Constant pnnx_5126 0 1 6353 value=64 pnnx.Attribute layers_dfe.4.residual_group.blocks.1 0 1 attn_mask.27 @attn_mask=(36,64,64)f32 #attn_mask.27=(36,64,64)f32 aten::size pnnx_5127 2 1 6341 6344 6360 #6341=(1,2304,192)f32 prim::NumToTensor pnnx_5128 1 1 6360 B.63 aten::Int pnnx_5129 1 1 B.63 6362 aten::Int pnnx_5130 1 1 B.63 6363 aten::size pnnx_5131 2 1 6341 6345 6364 #6341=(1,2304,192)f32 prim::NumToTensor pnnx_5132 1 1 6364 C.111 aten::Int pnnx_5133 1 1 C.111 6366 aten::Int pnnx_5134 1 1 C.111 6367 aten::Int pnnx_5135 1 1 C.111 6368 aten::Int pnnx_5136 1 1 C.111 6369 nn.LayerNorm layers_dfe.4.residual_group.blocks.1.norm1 1 1 6341 6370 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #6341=(1,2304,192)f32 #6370=(1,2304,192)f32 prim::ListConstruct 
pnnx_5137 4 1 6363 235 475 6369 6371 prim::Constant pnnx_5139 0 1 21613 value=-4 prim::ListConstruct pnnx_5140 2 1 6346 21613 6373 prim::Constant pnnx_5141 0 1 21614 value=2 prim::ListConstruct pnnx_5142 2 1 6347 21614 6374 Tensor.view Tensor.view_1252 2 1 6370 6371 x.53 $input=6370 $shape=6371 #6370=(1,2304,192)f32 #x.53=(1,48,48,192)f32 prim::Constant pnnx_5144 0 1 21615 value=0 torch.roll torch.roll_2444 3 1 x.53 6373 6374 x0.53 $input=x.53 $shifts=6373 $dims=6374 #x.53=(1,48,48,192)f32 #x0.53=(1,48,48,192)f32 aten::size pnnx_5145 2 1 x0.53 21615 6376 #x0.53=(1,48,48,192)f32 prim::NumToTensor pnnx_5146 1 1 6376 B0.53 aten::Int pnnx_5147 1 1 B0.53 6378 prim::Constant pnnx_5148 0 1 21616 value=1 aten::size pnnx_5149 2 1 x0.53 21616 6379 #x0.53=(1,48,48,192)f32 prim::NumToTensor pnnx_5150 1 1 6379 6380 prim::Constant pnnx_5151 0 1 21617 value=2 aten::size pnnx_5152 2 1 x0.53 21617 6381 #x0.53=(1,48,48,192)f32 prim::NumToTensor pnnx_5153 1 1 6381 6382 aten::size pnnx_5154 2 1 x0.53 6348 6383 #x0.53=(1,48,48,192)f32 prim::NumToTensor pnnx_5155 1 1 6383 C0.53 aten::Int pnnx_5156 1 1 C0.53 6385 aten::Int pnnx_5157 1 1 C0.53 6386 aten::div pnnx_5158 3 1 6380 6343 6342 6387 aten::Int pnnx_5159 1 1 6387 6388 prim::Constant pnnx_5160 0 1 21618 value=8 prim::Constant pnnx_5161 0 1 21619 value=trunc aten::div pnnx_5162 3 1 6382 21618 21619 6389 aten::Int pnnx_5163 1 1 6389 6390 prim::Constant pnnx_5164 0 1 21620 value=8 prim::ListConstruct pnnx_5165 6 1 6378 6388 6349 6390 21620 6386 6391 prim::Constant pnnx_5167 0 1 21621 value=0 prim::Constant pnnx_5168 0 1 21622 value=1 prim::Constant pnnx_5169 0 1 21623 value=3 prim::Constant pnnx_5170 0 1 21624 value=2 prim::ListConstruct pnnx_5171 6 1 21621 21622 21623 21624 6350 6351 6393 Tensor.view Tensor.view_1253 2 1 x0.53 6391 x1.53 $input=x0.53 $shape=6391 #x0.53=(1,48,48,192)f32 #x1.53=(1,6,8,6,8,192)f32 prim::Constant pnnx_5175 0 1 21626 value=8 prim::Constant pnnx_5176 0 1 21627 value=8 prim::ListConstruct pnnx_5177 4 1 6352 21626 21627 6385 6396 torch.permute torch.permute_2628 2 1 x1.53 6393 6394 $input=x1.53 $dims=6393 #x1.53=(1,6,8,6,8,192)f32 #6394=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_77 1 1 6394 6395 memory_format=torch.contiguous_format $input=6394 #6394=(1,6,6,8,8,192)f32 #6395=(1,6,6,8,8,192)f32 prim::Constant pnnx_5179 0 1 21628 value=-1 prim::ListConstruct pnnx_5180 3 1 21628 6353 6368 6398 prim::Constant pnnx_5182 0 1 6400 value=1.767767e-01 prim::Constant pnnx_5183 0 1 6401 value=trunc prim::Constant pnnx_5184 0 1 6402 value=6 prim::Constant pnnx_5185 0 1 6403 value=0 prim::Constant pnnx_5186 0 1 6404 value=1 prim::Constant pnnx_5187 0 1 6405 value=2 prim::Constant pnnx_5188 0 1 6406 value=3 prim::Constant pnnx_5189 0 1 6407 value=6 prim::Constant pnnx_5190 0 1 6408 value=4 prim::Constant pnnx_5191 0 1 6409 value=-2 prim::Constant pnnx_5192 0 1 6410 value=-1 prim::Constant pnnx_5193 0 1 6411 value=64 pnnx.Attribute layers_dfe.4.residual_group.blocks.1.attn 0 1 relative_position_bias_table.53 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.53=(225,6)f32 pnnx.Attribute layers_dfe.4.residual_group.blocks.1.attn 0 1 relative_position_index.53 @relative_position_index=(64,64)i64 #relative_position_index.53=(64,64)i64 Tensor.view Tensor.view_1254 2 1 6395 6396 x_windows.53 $input=6395 $shape=6396 #6395=(1,6,6,8,8,192)f32 #x_windows.53=(36,8,8,192)f32 Tensor.view Tensor.view_1255 2 1 x_windows.53 6398 x2.53 $input=x_windows.53 $shape=6398 #x_windows.53=(36,8,8,192)f32 #x2.53=(36,64,192)f32 aten::size 
pnnx_5194 2 1 x2.53 6403 6419 #x2.53=(36,64,192)f32 prim::NumToTensor pnnx_5195 1 1 6419 B_.53 aten::Int pnnx_5196 1 1 B_.53 6421 aten::Int pnnx_5197 1 1 B_.53 6422 aten::size pnnx_5198 2 1 x2.53 6404 6423 #x2.53=(36,64,192)f32 prim::NumToTensor pnnx_5199 1 1 6423 N.53 aten::Int pnnx_5200 1 1 N.53 6425 aten::Int pnnx_5201 1 1 N.53 6426 aten::Int pnnx_5202 1 1 N.53 6427 aten::Int pnnx_5203 1 1 N.53 6428 aten::Int pnnx_5204 1 1 N.53 6429 aten::Int pnnx_5205 1 1 N.53 6430 aten::size pnnx_5206 2 1 x2.53 6405 6431 #x2.53=(36,64,192)f32 prim::NumToTensor pnnx_5207 1 1 6431 C.113 aten::Int pnnx_5208 1 1 C.113 6433 nn.Linear layers_dfe.4.residual_group.blocks.1.attn.qkv 1 1 x2.53 6434 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.53=(36,64,192)f32 #6434=(36,64,576)f32 aten::div pnnx_5209 3 1 C.113 6402 6401 6435 aten::Int pnnx_5210 1 1 6435 6436 prim::ListConstruct pnnx_5211 5 1 6422 6430 6406 6407 6436 6437 prim::Constant pnnx_5213 0 1 21629 value=2 prim::Constant pnnx_5214 0 1 21630 value=0 prim::Constant pnnx_5215 0 1 21631 value=3 prim::Constant pnnx_5216 0 1 21632 value=1 prim::ListConstruct pnnx_5217 5 1 21629 21630 21631 21632 6408 6439 Tensor.reshape Tensor.reshape_484 2 1 6434 6437 6438 $input=6434 $shape=6437 #6434=(36,64,576)f32 #6438=(36,64,3,6,32)f32 prim::Constant pnnx_5219 0 1 21633 value=0 prim::Constant pnnx_5220 0 1 21634 value=0 prim::Constant pnnx_5222 0 1 21635 value=0 prim::Constant pnnx_5223 0 1 21636 value=1 prim::Constant pnnx_5225 0 1 21637 value=0 prim::Constant pnnx_5226 0 1 21638 value=2 torch.permute torch.permute_2629 2 1 6438 6439 qkv0.53 $input=6438 $dims=6439 #6438=(36,64,3,6,32)f32 #qkv0.53=(3,36,6,64,32)f32 Tensor.select Tensor.select_725 3 1 qkv0.53 21633 21634 q.53 $input=qkv0.53 $dim=21633 $index=21634 #qkv0.53=(3,36,6,64,32)f32 #q.53=(36,6,64,32)f32 aten::mul pnnx_5228 2 1 q.53 6400 q0.53 #q.53=(36,6,64,32)f32 #q0.53=(36,6,64,32)f32 Tensor.select Tensor.select_726 3 1 qkv0.53 21635 21636 k.53 $input=qkv0.53 $dim=21635 $index=21636 #qkv0.53=(3,36,6,64,32)f32 #k.53=(36,6,64,32)f32 prim::Constant pnnx_5231 0 1 21639 value=-1 prim::ListConstruct pnnx_5232 1 1 21639 6447 Tensor.view Tensor.view_1256 2 1 relative_position_index.53 6447 6448 $input=relative_position_index.53 $shape=6447 #relative_position_index.53=(64,64)i64 #6448=(4096)i64 prim::ListConstruct pnnx_5234 1 1 6448 6449 #6448=(4096)i64 prim::Constant pnnx_5236 0 1 21640 value=64 prim::Constant pnnx_5237 0 1 21641 value=-1 prim::ListConstruct pnnx_5238 3 1 6411 21640 21641 6451 Tensor.index Tensor.index_351 2 1 relative_position_bias_table.53 6449 6450 $input=relative_position_bias_table.53 $expr=6449 #relative_position_bias_table.53=(225,6)f32 #6450=(4096,6)f32 prim::Constant pnnx_5240 0 1 21642 value=2 prim::Constant pnnx_5241 0 1 21643 value=0 prim::Constant pnnx_5242 0 1 21644 value=1 prim::ListConstruct pnnx_5243 3 1 21642 21643 21644 6453 Tensor.view Tensor.view_1257 2 1 6450 6451 relative_position_bias.53 $input=6450 $shape=6451 #6450=(4096,6)f32 #relative_position_bias.53=(64,64,6)f32 prim::Constant pnnx_5247 0 1 21646 value=0 torch.permute torch.permute_2630 2 1 relative_position_bias.53 6453 6454 $input=relative_position_bias.53 $dims=6453 #relative_position_bias.53=(64,64,6)f32 #6454=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_78 1 1 6454 relative_position_bias0.53 memory_format=torch.contiguous_format $input=6454 #6454=(6,64,64)f32 #relative_position_bias0.53=(6,64,64)f32 prim::Constant pnnx_5249 0 1 21647 value=1 torch.transpose 
torch.transpose_3019 3 1 k.53 6409 6410 6445 $input=k.53 $dim0=6409 $dim1=6410 #k.53=(36,6,64,32)f32 #6445=(36,6,32,64)f32 torch.matmul torch.matmul_2254 2 1 q0.53 6445 attn.107 $input=q0.53 $other=6445 #q0.53=(36,6,64,32)f32 #6445=(36,6,32,64)f32 #attn.107=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3267 2 1 relative_position_bias0.53 21646 6456 $input=relative_position_bias0.53 $dim=21646 #relative_position_bias0.53=(6,64,64)f32 #6456=(1,6,64,64)f32 aten::add pnnx_5250 3 1 attn.107 6456 21647 attn0.27 #attn.107=(36,6,64,64)f32 #6456=(1,6,64,64)f32 #attn0.27=(36,6,64,64)f32 prim::Constant pnnx_5251 0 1 21648 value=0 aten::size pnnx_5252 2 1 attn_mask.27 21648 6458 #attn_mask.27=(36,64,64)f32 prim::NumToTensor pnnx_5253 1 1 6458 other.27 aten::Int pnnx_5254 1 1 other.27 6460 prim::Constant pnnx_5255 0 1 21649 value=trunc aten::div pnnx_5256 3 1 B_.53 other.27 21649 6461 aten::Int pnnx_5257 1 1 6461 6462 prim::Constant pnnx_5258 0 1 21650 value=6 prim::ListConstruct pnnx_5259 5 1 6462 6460 21650 6429 6428 6463 prim::Constant pnnx_5261 0 1 21651 value=1 prim::Constant pnnx_5263 0 1 21652 value=0 prim::Constant pnnx_5265 0 1 21653 value=1 Tensor.view Tensor.view_1258 2 1 attn0.27 6463 6464 $input=attn0.27 $shape=6463 #attn0.27=(36,6,64,64)f32 #6464=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3268 2 1 attn_mask.27 21651 6465 $input=attn_mask.27 $dim=21651 #attn_mask.27=(36,64,64)f32 #6465=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3269 2 1 6465 21652 6466 $input=6465 $dim=21652 #6465=(36,1,64,64)f32 #6466=(1,36,1,64,64)f32 aten::add pnnx_5266 3 1 6464 6466 21653 attn1.27 #6464=(1,36,6,64,64)f32 #6466=(1,36,1,64,64)f32 #attn1.27=(1,36,6,64,64)f32 prim::Constant pnnx_5267 0 1 21654 value=-1 prim::Constant pnnx_5268 0 1 21655 value=6 prim::ListConstruct pnnx_5269 4 1 21654 21655 6427 6426 6468 Tensor.view Tensor.view_1259 2 1 attn1.27 6468 input.119 $input=attn1.27 $shape=6468 #attn1.27=(1,36,6,64,64)f32 #input.119=(36,6,64,64)f32 nn.Softmax layers_dfe.4.residual_group.blocks.1.attn.softmax 1 1 input.119 6470 dim=-1 #input.119=(36,6,64,64)f32 #6470=(36,6,64,64)f32 nn.Dropout layers_dfe.4.residual_group.blocks.1.attn.attn_drop 1 1 6470 6471 #6470=(36,6,64,64)f32 #6471=(36,6,64,64)f32 Tensor.select Tensor.select_727 3 1 qkv0.53 21637 21638 v.53 $input=qkv0.53 $dim=21637 $index=21638 #qkv0.53=(3,36,6,64,32)f32 #v.53=(36,6,64,32)f32 prim::Constant pnnx_5272 0 1 21656 value=1 prim::Constant pnnx_5273 0 1 21657 value=2 torch.matmul torch.matmul_2255 2 1 6471 v.53 6472 $input=6471 $other=v.53 #6471=(36,6,64,64)f32 #v.53=(36,6,64,32)f32 #6472=(36,6,64,32)f32 prim::ListConstruct pnnx_5275 3 1 6421 6425 6433 6474 torch.transpose torch.transpose_3020 3 1 6472 21656 21657 6473 $input=6472 $dim0=21656 $dim1=21657 #6472=(36,6,64,32)f32 #6473=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_485 2 1 6473 6474 input0.55 $input=6473 $shape=6474 #6473=(36,64,6,32)f32 #input0.55=(36,64,192)f32 nn.Linear layers_dfe.4.residual_group.blocks.1.attn.proj 1 1 input0.55 6476 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.55=(36,64,192)f32 #6476=(36,64,192)f32 nn.Dropout layers_dfe.4.residual_group.blocks.1.attn.proj_drop 1 1 6476 6477 #6476=(36,64,192)f32 #6477=(36,64,192)f32 prim::Constant pnnx_5277 0 1 21658 value=-1 prim::Constant pnnx_5278 0 1 21659 value=8 prim::Constant pnnx_5279 0 1 21660 value=8 prim::ListConstruct pnnx_5280 4 1 21658 21659 21660 6367 6478 prim::Constant pnnx_5282 0 1 21661 value=8 prim::Constant pnnx_5283 0 1 21662 value=trunc aten::div pnnx_5284 
3 1 H.1 21661 21662 6480 aten::Int pnnx_5285 1 1 6480 6481 prim::Constant pnnx_5286 0 1 21663 value=8 prim::Constant pnnx_5287 0 1 21664 value=trunc aten::div pnnx_5288 3 1 W.1 21663 21664 6482 aten::Int pnnx_5289 1 1 6482 6483 prim::Constant pnnx_5290 0 1 21665 value=1 prim::Constant pnnx_5291 0 1 21666 value=8 prim::Constant pnnx_5292 0 1 21667 value=8 prim::Constant pnnx_5293 0 1 21668 value=-1 prim::ListConstruct pnnx_5294 6 1 21665 6481 6483 21666 21667 21668 6484 prim::Constant pnnx_5296 0 1 21669 value=0 prim::Constant pnnx_5297 0 1 21670 value=1 prim::Constant pnnx_5298 0 1 21671 value=3 prim::Constant pnnx_5299 0 1 21672 value=2 prim::Constant pnnx_5300 0 1 21673 value=4 prim::Constant pnnx_5301 0 1 21674 value=5 prim::ListConstruct pnnx_5302 6 1 21669 21670 21671 21672 21673 21674 6486 Tensor.view Tensor.view_1260 2 1 6477 6478 windows.53 $input=6477 $shape=6478 #6477=(36,64,192)f32 #windows.53=(36,8,8,192)f32 Tensor.view Tensor.view_1261 2 1 windows.53 6484 x3.53 $input=windows.53 $shape=6484 #windows.53=(36,8,8,192)f32 #x3.53=(1,6,6,8,8,192)f32 prim::Constant pnnx_5306 0 1 21676 value=1 prim::Constant pnnx_5307 0 1 21677 value=-1 prim::ListConstruct pnnx_5308 4 1 21676 232 472 21677 6489 torch.permute torch.permute_2631 2 1 x3.53 6486 6487 $input=x3.53 $dims=6486 #x3.53=(1,6,6,8,8,192)f32 #6487=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_79 1 1 6487 6488 memory_format=torch.contiguous_format $input=6487 #6487=(1,6,8,6,8,192)f32 #6488=(1,6,8,6,8,192)f32 prim::Constant pnnx_5310 0 1 21678 value=4 prim::Constant pnnx_5311 0 1 21679 value=4 prim::ListConstruct pnnx_5312 2 1 21678 21679 6491 prim::Constant pnnx_5313 0 1 21680 value=1 prim::Constant pnnx_5314 0 1 21681 value=2 prim::ListConstruct pnnx_5315 2 1 21680 21681 6492 Tensor.view Tensor.view_1262 2 1 6488 6489 shifted_x.27 $input=6488 $shape=6489 #6488=(1,6,8,6,8,192)f32 #shifted_x.27=(1,48,48,192)f32 aten::mul pnnx_5317 2 1 H.1 W.1 6494 aten::Int pnnx_5318 1 1 6494 6495 prim::ListConstruct pnnx_5319 3 1 6362 6495 6366 6496 prim::Constant pnnx_5321 0 1 6498 value=None prim::Constant pnnx_5322 0 1 21682 value=1 torch.roll torch.roll_2445 3 1 shifted_x.27 6491 6492 x4.53 $input=shifted_x.27 $shifts=6491 $dims=6492 #shifted_x.27=(1,48,48,192)f32 #x4.53=(1,48,48,192)f32 Tensor.view Tensor.view_1263 2 1 x4.53 6496 x5.27 $input=x4.53 $shape=6496 #x4.53=(1,48,48,192)f32 #x5.27=(1,2304,192)f32 aten::add pnnx_5323 3 1 6341 x5.27 21682 input.121 #6341=(1,2304,192)f32 #x5.27=(1,2304,192)f32 #input.121=(1,2304,192)f32 nn.LayerNorm layers_dfe.4.residual_group.blocks.1.norm2 1 1 input.121 6500 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.121=(1,2304,192)f32 #6500=(1,2304,192)f32 nn.Linear layers_dfe.4.residual_group.blocks.1.mlp.fc1 1 1 6500 6505 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #6500=(1,2304,192)f32 #6505=(1,2304,384)f32 nn.GELU layers_dfe.4.residual_group.blocks.1.mlp.act 1 1 6505 6506 #6505=(1,2304,384)f32 #6506=(1,2304,384)f32 nn.Dropout layers_dfe.4.residual_group.blocks.1.mlp.drop 1 1 6506 6507 #6506=(1,2304,384)f32 #6507=(1,2304,384)f32 nn.Linear layers_dfe.4.residual_group.blocks.1.mlp.fc2 1 1 6507 6508 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #6507=(1,2304,384)f32 #6508=(1,2304,192)f32 nn.Dropout layers_dfe.4.residual_group.blocks.1.mlp.drop 1 1 6508 6509 #6508=(1,2304,192)f32 #6509=(1,2304,192)f32 prim::Constant pnnx_5324 0 1 6510 value=None prim::Constant pnnx_5325 0 1 21683 
value=1 aten::add pnnx_5326 3 1 input.121 6509 21683 6511 #input.121=(1,2304,192)f32 #6509=(1,2304,192)f32 #6511=(1,2304,192)f32 prim::Constant pnnx_5327 0 1 6512 value=trunc prim::Constant pnnx_5328 0 1 6513 value=8 prim::Constant pnnx_5329 0 1 6514 value=0 prim::Constant pnnx_5330 0 1 6515 value=2 prim::Constant pnnx_5331 0 1 6516 value=1 prim::Constant pnnx_5332 0 1 6517 value=3 prim::Constant pnnx_5333 0 1 6518 value=8 prim::Constant pnnx_5334 0 1 6519 value=4 prim::Constant pnnx_5335 0 1 6520 value=5 prim::Constant pnnx_5336 0 1 6521 value=-1 prim::Constant pnnx_5337 0 1 6522 value=64 aten::size pnnx_5338 2 1 6511 6514 6528 #6511=(1,2304,192)f32 prim::NumToTensor pnnx_5339 1 1 6528 B.65 aten::Int pnnx_5340 1 1 B.65 6530 aten::Int pnnx_5341 1 1 B.65 6531 aten::size pnnx_5342 2 1 6511 6515 6532 #6511=(1,2304,192)f32 prim::NumToTensor pnnx_5343 1 1 6532 C.115 aten::Int pnnx_5344 1 1 C.115 6534 aten::Int pnnx_5345 1 1 C.115 6535 aten::Int pnnx_5346 1 1 C.115 6536 aten::Int pnnx_5347 1 1 C.115 6537 nn.LayerNorm layers_dfe.4.residual_group.blocks.2.norm1 1 1 6511 6538 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #6511=(1,2304,192)f32 #6538=(1,2304,192)f32 prim::ListConstruct pnnx_5348 4 1 6531 229 469 6537 6539 prim::Constant pnnx_5350 0 1 21684 value=0 Tensor.view Tensor.view_1264 2 1 6538 6539 x.55 $input=6538 $shape=6539 #6538=(1,2304,192)f32 #x.55=(1,48,48,192)f32 aten::size pnnx_5351 2 1 x.55 21684 6541 #x.55=(1,48,48,192)f32 prim::NumToTensor pnnx_5352 1 1 6541 B0.55 aten::Int pnnx_5353 1 1 B0.55 6543 aten::size pnnx_5354 2 1 x.55 6516 6544 #x.55=(1,48,48,192)f32 prim::NumToTensor pnnx_5355 1 1 6544 6545 prim::Constant pnnx_5356 0 1 21685 value=2 aten::size pnnx_5357 2 1 x.55 21685 6546 #x.55=(1,48,48,192)f32 prim::NumToTensor pnnx_5358 1 1 6546 6547 aten::size pnnx_5359 2 1 x.55 6517 6548 #x.55=(1,48,48,192)f32 prim::NumToTensor pnnx_5360 1 1 6548 C0.55 aten::Int pnnx_5361 1 1 C0.55 6550 aten::Int pnnx_5362 1 1 C0.55 6551 aten::div pnnx_5363 3 1 6545 6513 6512 6552 aten::Int pnnx_5364 1 1 6552 6553 prim::Constant pnnx_5365 0 1 21686 value=8 prim::Constant pnnx_5366 0 1 21687 value=trunc aten::div pnnx_5367 3 1 6547 21686 21687 6554 aten::Int pnnx_5368 1 1 6554 6555 prim::Constant pnnx_5369 0 1 21688 value=8 prim::ListConstruct pnnx_5370 6 1 6543 6553 6518 6555 21688 6551 6556 prim::Constant pnnx_5372 0 1 21689 value=0 prim::Constant pnnx_5373 0 1 21690 value=1 prim::Constant pnnx_5374 0 1 21691 value=3 prim::Constant pnnx_5375 0 1 21692 value=2 prim::ListConstruct pnnx_5376 6 1 21689 21690 21691 21692 6519 6520 6558 Tensor.view Tensor.view_1265 2 1 x.55 6556 x0.55 $input=x.55 $shape=6556 #x.55=(1,48,48,192)f32 #x0.55=(1,6,8,6,8,192)f32 prim::Constant pnnx_5380 0 1 21694 value=8 prim::Constant pnnx_5381 0 1 21695 value=8 prim::ListConstruct pnnx_5382 4 1 6521 21694 21695 6550 6561 torch.permute torch.permute_2632 2 1 x0.55 6558 6559 $input=x0.55 $dims=6558 #x0.55=(1,6,8,6,8,192)f32 #6559=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_80 1 1 6559 6560 memory_format=torch.contiguous_format $input=6559 #6559=(1,6,6,8,8,192)f32 #6560=(1,6,6,8,8,192)f32 prim::Constant pnnx_5384 0 1 21696 value=-1 prim::ListConstruct pnnx_5385 3 1 21696 6522 6536 6563 prim::Constant pnnx_5387 0 1 6565 value=1.767767e-01 prim::Constant pnnx_5388 0 1 6566 value=trunc prim::Constant pnnx_5389 0 1 6567 value=6 prim::Constant pnnx_5390 0 1 6568 value=0 prim::Constant pnnx_5391 0 1 6569 value=1 prim::Constant pnnx_5392 0 1 6570 value=2 prim::Constant 
pnnx_5393 0 1 6571 value=3 prim::Constant pnnx_5394 0 1 6572 value=6 prim::Constant pnnx_5395 0 1 6573 value=4 prim::Constant pnnx_5396 0 1 6574 value=-2 prim::Constant pnnx_5397 0 1 6575 value=-1 prim::Constant pnnx_5398 0 1 6576 value=64 pnnx.Attribute layers_dfe.4.residual_group.blocks.2.attn 0 1 relative_position_bias_table.55 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.55=(225,6)f32 pnnx.Attribute layers_dfe.4.residual_group.blocks.2.attn 0 1 relative_position_index.55 @relative_position_index=(64,64)i64 #relative_position_index.55=(64,64)i64 Tensor.view Tensor.view_1266 2 1 6560 6561 x_windows.55 $input=6560 $shape=6561 #6560=(1,6,6,8,8,192)f32 #x_windows.55=(36,8,8,192)f32 Tensor.view Tensor.view_1267 2 1 x_windows.55 6563 x1.55 $input=x_windows.55 $shape=6563 #x_windows.55=(36,8,8,192)f32 #x1.55=(36,64,192)f32 aten::size pnnx_5399 2 1 x1.55 6568 6584 #x1.55=(36,64,192)f32 prim::NumToTensor pnnx_5400 1 1 6584 B_.55 aten::Int pnnx_5401 1 1 B_.55 6586 aten::Int pnnx_5402 1 1 B_.55 6587 aten::size pnnx_5403 2 1 x1.55 6569 6588 #x1.55=(36,64,192)f32 prim::NumToTensor pnnx_5404 1 1 6588 N.55 aten::Int pnnx_5405 1 1 N.55 6590 aten::Int pnnx_5406 1 1 N.55 6591 aten::size pnnx_5407 2 1 x1.55 6570 6592 #x1.55=(36,64,192)f32 prim::NumToTensor pnnx_5408 1 1 6592 C.117 aten::Int pnnx_5409 1 1 C.117 6594 nn.Linear layers_dfe.4.residual_group.blocks.2.attn.qkv 1 1 x1.55 6595 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.55=(36,64,192)f32 #6595=(36,64,576)f32 aten::div pnnx_5410 3 1 C.117 6567 6566 6596 aten::Int pnnx_5411 1 1 6596 6597 prim::ListConstruct pnnx_5412 5 1 6587 6591 6571 6572 6597 6598 prim::Constant pnnx_5414 0 1 21697 value=2 prim::Constant pnnx_5415 0 1 21698 value=0 prim::Constant pnnx_5416 0 1 21699 value=3 prim::Constant pnnx_5417 0 1 21700 value=1 prim::ListConstruct pnnx_5418 5 1 21697 21698 21699 21700 6573 6600 Tensor.reshape Tensor.reshape_486 2 1 6595 6598 6599 $input=6595 $shape=6598 #6595=(36,64,576)f32 #6599=(36,64,3,6,32)f32 prim::Constant pnnx_5420 0 1 21701 value=0 prim::Constant pnnx_5421 0 1 21702 value=0 prim::Constant pnnx_5423 0 1 21703 value=0 prim::Constant pnnx_5424 0 1 21704 value=1 prim::Constant pnnx_5426 0 1 21705 value=0 prim::Constant pnnx_5427 0 1 21706 value=2 torch.permute torch.permute_2633 2 1 6599 6600 qkv0.55 $input=6599 $dims=6600 #6599=(36,64,3,6,32)f32 #qkv0.55=(3,36,6,64,32)f32 Tensor.select Tensor.select_728 3 1 qkv0.55 21701 21702 q.55 $input=qkv0.55 $dim=21701 $index=21702 #qkv0.55=(3,36,6,64,32)f32 #q.55=(36,6,64,32)f32 aten::mul pnnx_5429 2 1 q.55 6565 q0.55 #q.55=(36,6,64,32)f32 #q0.55=(36,6,64,32)f32 Tensor.select Tensor.select_729 3 1 qkv0.55 21703 21704 k.55 $input=qkv0.55 $dim=21703 $index=21704 #qkv0.55=(3,36,6,64,32)f32 #k.55=(36,6,64,32)f32 prim::Constant pnnx_5432 0 1 21707 value=-1 prim::ListConstruct pnnx_5433 1 1 21707 6608 Tensor.view Tensor.view_1268 2 1 relative_position_index.55 6608 6609 $input=relative_position_index.55 $shape=6608 #relative_position_index.55=(64,64)i64 #6609=(4096)i64 prim::ListConstruct pnnx_5435 1 1 6609 6610 #6609=(4096)i64 prim::Constant pnnx_5437 0 1 21708 value=64 prim::Constant pnnx_5438 0 1 21709 value=-1 prim::ListConstruct pnnx_5439 3 1 6576 21708 21709 6612 Tensor.index Tensor.index_352 2 1 relative_position_bias_table.55 6610 6611 $input=relative_position_bias_table.55 $expr=6610 #relative_position_bias_table.55=(225,6)f32 #6611=(4096,6)f32 prim::Constant pnnx_5441 0 1 21710 value=2 prim::Constant pnnx_5442 0 1 21711 value=0 
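
Every attn sub-module stores a (225,6) relative_position_bias_table and a (64,64) int64 relative_position_index; the chain around Tensor.view_1268, Tensor.index_352, Tensor.view_1269 and torch.permute_2634 gathers them into a per-head (6,64,64) bias that is later unsqueezed and added to the q @ k^T logits. A minimal sketch with random stand-ins for the two buffers:

import torch

num_heads, win = 6, 8
N = win * win                                         # 64 tokens per window

table = torch.randn((2 * win - 1) ** 2, num_heads)    # (225, 6) learned table
index = torch.randint(0, (2 * win - 1) ** 2, (N, N))  # (64, 64) int64 index

# table[index.view(-1)] -> (4096, 6), then back to (64, 64, 6)
bias = table[index.view(-1)].view(N, N, num_heads)
bias = bias.permute(2, 0, 1).contiguous()             # (6, 64, 64)

attn = torch.randn(36, num_heads, N, N)               # q @ k^T for the 36 windows
attn = attn + bias.unsqueeze(0)                       # broadcast over windows
print(attn.shape)                                     # torch.Size([36, 6, 64, 64])
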
prim::Constant pnnx_5443 0 1 21712 value=1 prim::ListConstruct pnnx_5444 3 1 21710 21711 21712 6614 Tensor.view Tensor.view_1269 2 1 6611 6612 relative_position_bias.55 $input=6611 $shape=6612 #6611=(4096,6)f32 #relative_position_bias.55=(64,64,6)f32 prim::Constant pnnx_5448 0 1 21714 value=0 torch.permute torch.permute_2634 2 1 relative_position_bias.55 6614 6615 $input=relative_position_bias.55 $dims=6614 #relative_position_bias.55=(64,64,6)f32 #6615=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_81 1 1 6615 relative_position_bias0.55 memory_format=torch.contiguous_format $input=6615 #6615=(6,64,64)f32 #relative_position_bias0.55=(6,64,64)f32 prim::Constant pnnx_5450 0 1 21715 value=1 torch.transpose torch.transpose_3021 3 1 k.55 6574 6575 6606 $input=k.55 $dim0=6574 $dim1=6575 #k.55=(36,6,64,32)f32 #6606=(36,6,32,64)f32 torch.matmul torch.matmul_2256 2 1 q0.55 6606 attn.111 $input=q0.55 $other=6606 #q0.55=(36,6,64,32)f32 #6606=(36,6,32,64)f32 #attn.111=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3270 2 1 relative_position_bias0.55 21714 6617 $input=relative_position_bias0.55 $dim=21714 #relative_position_bias0.55=(6,64,64)f32 #6617=(1,6,64,64)f32 aten::add pnnx_5451 3 1 attn.111 6617 21715 input.123 #attn.111=(36,6,64,64)f32 #6617=(1,6,64,64)f32 #input.123=(36,6,64,64)f32 nn.Softmax layers_dfe.4.residual_group.blocks.2.attn.softmax 1 1 input.123 6619 dim=-1 #input.123=(36,6,64,64)f32 #6619=(36,6,64,64)f32 nn.Dropout layers_dfe.4.residual_group.blocks.2.attn.attn_drop 1 1 6619 6620 #6619=(36,6,64,64)f32 #6620=(36,6,64,64)f32 Tensor.select Tensor.select_730 3 1 qkv0.55 21705 21706 v.55 $input=qkv0.55 $dim=21705 $index=21706 #qkv0.55=(3,36,6,64,32)f32 #v.55=(36,6,64,32)f32 prim::Constant pnnx_5453 0 1 21716 value=1 prim::Constant pnnx_5454 0 1 21717 value=2 torch.matmul torch.matmul_2257 2 1 6620 v.55 6621 $input=6620 $other=v.55 #6620=(36,6,64,64)f32 #v.55=(36,6,64,32)f32 #6621=(36,6,64,32)f32 prim::ListConstruct pnnx_5456 3 1 6586 6590 6594 6623 torch.transpose torch.transpose_3022 3 1 6621 21716 21717 6622 $input=6621 $dim0=21716 $dim1=21717 #6621=(36,6,64,32)f32 #6622=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_487 2 1 6622 6623 input0.57 $input=6622 $shape=6623 #6622=(36,64,6,32)f32 #input0.57=(36,64,192)f32 nn.Linear layers_dfe.4.residual_group.blocks.2.attn.proj 1 1 input0.57 6625 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.57=(36,64,192)f32 #6625=(36,64,192)f32 nn.Dropout layers_dfe.4.residual_group.blocks.2.attn.proj_drop 1 1 6625 6626 #6625=(36,64,192)f32 #6626=(36,64,192)f32 prim::Constant pnnx_5458 0 1 21718 value=-1 prim::Constant pnnx_5459 0 1 21719 value=8 prim::Constant pnnx_5460 0 1 21720 value=8 prim::ListConstruct pnnx_5461 4 1 21718 21719 21720 6535 6627 prim::Constant pnnx_5463 0 1 21721 value=8 prim::Constant pnnx_5464 0 1 21722 value=trunc aten::div pnnx_5465 3 1 H.1 21721 21722 6629 aten::Int pnnx_5466 1 1 6629 6630 prim::Constant pnnx_5467 0 1 21723 value=8 prim::Constant pnnx_5468 0 1 21724 value=trunc aten::div pnnx_5469 3 1 W.1 21723 21724 6631 aten::Int pnnx_5470 1 1 6631 6632 prim::Constant pnnx_5471 0 1 21725 value=1 prim::Constant pnnx_5472 0 1 21726 value=8 prim::Constant pnnx_5473 0 1 21727 value=8 prim::Constant pnnx_5474 0 1 21728 value=-1 prim::ListConstruct pnnx_5475 6 1 21725 6630 6632 21726 21727 21728 6633 prim::Constant pnnx_5477 0 1 21729 value=0 prim::Constant pnnx_5478 0 1 21730 value=1 prim::Constant pnnx_5479 0 1 21731 value=3 prim::Constant pnnx_5480 0 1 21732 value=2 prim::Constant pnnx_5481 
0 1 21733 value=4 prim::Constant pnnx_5482 0 1 21734 value=5 prim::ListConstruct pnnx_5483 6 1 21729 21730 21731 21732 21733 21734 6635 Tensor.view Tensor.view_1270 2 1 6626 6627 windows.55 $input=6626 $shape=6627 #6626=(36,64,192)f32 #windows.55=(36,8,8,192)f32 Tensor.view Tensor.view_1271 2 1 windows.55 6633 x2.55 $input=windows.55 $shape=6633 #windows.55=(36,8,8,192)f32 #x2.55=(1,6,6,8,8,192)f32 prim::Constant pnnx_5487 0 1 21736 value=1 prim::Constant pnnx_5488 0 1 21737 value=-1 prim::ListConstruct pnnx_5489 4 1 21736 226 466 21737 6638 torch.permute torch.permute_2635 2 1 x2.55 6635 6636 $input=x2.55 $dims=6635 #x2.55=(1,6,6,8,8,192)f32 #6636=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_82 1 1 6636 6637 memory_format=torch.contiguous_format $input=6636 #6636=(1,6,8,6,8,192)f32 #6637=(1,6,8,6,8,192)f32 aten::mul pnnx_5491 2 1 H.1 W.1 6640 aten::Int pnnx_5492 1 1 6640 6641 prim::ListConstruct pnnx_5493 3 1 6530 6641 6534 6642 prim::Constant pnnx_5495 0 1 6644 value=None prim::Constant pnnx_5496 0 1 21738 value=1 Tensor.view Tensor.view_1272 2 1 6637 6638 x3.55 $input=6637 $shape=6638 #6637=(1,6,8,6,8,192)f32 #x3.55=(1,48,48,192)f32 Tensor.view Tensor.view_1273 2 1 x3.55 6642 x4.55 $input=x3.55 $shape=6642 #x3.55=(1,48,48,192)f32 #x4.55=(1,2304,192)f32 aten::add pnnx_5497 3 1 6511 x4.55 21738 input.125 #6511=(1,2304,192)f32 #x4.55=(1,2304,192)f32 #input.125=(1,2304,192)f32 nn.LayerNorm layers_dfe.4.residual_group.blocks.2.norm2 1 1 input.125 6646 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.125=(1,2304,192)f32 #6646=(1,2304,192)f32 nn.Linear layers_dfe.4.residual_group.blocks.2.mlp.fc1 1 1 6646 6651 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #6646=(1,2304,192)f32 #6651=(1,2304,384)f32 nn.GELU layers_dfe.4.residual_group.blocks.2.mlp.act 1 1 6651 6652 #6651=(1,2304,384)f32 #6652=(1,2304,384)f32 nn.Dropout layers_dfe.4.residual_group.blocks.2.mlp.drop 1 1 6652 6653 #6652=(1,2304,384)f32 #6653=(1,2304,384)f32 nn.Linear layers_dfe.4.residual_group.blocks.2.mlp.fc2 1 1 6653 6654 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #6653=(1,2304,384)f32 #6654=(1,2304,192)f32 nn.Dropout layers_dfe.4.residual_group.blocks.2.mlp.drop 1 1 6654 6655 #6654=(1,2304,192)f32 #6655=(1,2304,192)f32 prim::Constant pnnx_5498 0 1 6656 value=None prim::Constant pnnx_5499 0 1 21739 value=1 aten::add pnnx_5500 3 1 input.125 6655 21739 6657 #input.125=(1,2304,192)f32 #6655=(1,2304,192)f32 #6657=(1,2304,192)f32 prim::Constant pnnx_5501 0 1 6658 value=trunc prim::Constant pnnx_5502 0 1 6659 value=8 prim::Constant pnnx_5503 0 1 6660 value=0 prim::Constant pnnx_5504 0 1 6661 value=2 prim::Constant pnnx_5505 0 1 6662 value=-4 prim::Constant pnnx_5506 0 1 6663 value=1 prim::Constant pnnx_5507 0 1 6664 value=3 prim::Constant pnnx_5508 0 1 6665 value=8 prim::Constant pnnx_5509 0 1 6666 value=4 prim::Constant pnnx_5510 0 1 6667 value=5 prim::Constant pnnx_5511 0 1 6668 value=-1 prim::Constant pnnx_5512 0 1 6669 value=64 pnnx.Attribute layers_dfe.4.residual_group.blocks.3 0 1 attn_mask.29 @attn_mask=(36,64,64)f32 #attn_mask.29=(36,64,64)f32 aten::size pnnx_5513 2 1 6657 6660 6676 #6657=(1,2304,192)f32 prim::NumToTensor pnnx_5514 1 1 6676 B.67 aten::Int pnnx_5515 1 1 B.67 6678 aten::Int pnnx_5516 1 1 B.67 6679 aten::size pnnx_5517 2 1 6657 6661 6680 #6657=(1,2304,192)f32 prim::NumToTensor pnnx_5518 1 1 6680 C.119 aten::Int pnnx_5519 1 1 C.119 6682 aten::Int pnnx_5520 1 1 C.119 6683 aten::Int 
pnnx_5521 1 1 C.119 6684 aten::Int pnnx_5522 1 1 C.119 6685 nn.LayerNorm layers_dfe.4.residual_group.blocks.3.norm1 1 1 6657 6686 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #6657=(1,2304,192)f32 #6686=(1,2304,192)f32 prim::ListConstruct pnnx_5523 4 1 6679 223 463 6685 6687 prim::Constant pnnx_5525 0 1 21740 value=-4 prim::ListConstruct pnnx_5526 2 1 6662 21740 6689 prim::Constant pnnx_5527 0 1 21741 value=2 prim::ListConstruct pnnx_5528 2 1 6663 21741 6690 Tensor.view Tensor.view_1274 2 1 6686 6687 x.57 $input=6686 $shape=6687 #6686=(1,2304,192)f32 #x.57=(1,48,48,192)f32 prim::Constant pnnx_5530 0 1 21742 value=0 torch.roll torch.roll_2446 3 1 x.57 6689 6690 x0.57 $input=x.57 $shifts=6689 $dims=6690 #x.57=(1,48,48,192)f32 #x0.57=(1,48,48,192)f32 aten::size pnnx_5531 2 1 x0.57 21742 6692 #x0.57=(1,48,48,192)f32 prim::NumToTensor pnnx_5532 1 1 6692 B0.57 aten::Int pnnx_5533 1 1 B0.57 6694 prim::Constant pnnx_5534 0 1 21743 value=1 aten::size pnnx_5535 2 1 x0.57 21743 6695 #x0.57=(1,48,48,192)f32 prim::NumToTensor pnnx_5536 1 1 6695 6696 prim::Constant pnnx_5537 0 1 21744 value=2 aten::size pnnx_5538 2 1 x0.57 21744 6697 #x0.57=(1,48,48,192)f32 prim::NumToTensor pnnx_5539 1 1 6697 6698 aten::size pnnx_5540 2 1 x0.57 6664 6699 #x0.57=(1,48,48,192)f32 prim::NumToTensor pnnx_5541 1 1 6699 C0.57 aten::Int pnnx_5542 1 1 C0.57 6701 aten::Int pnnx_5543 1 1 C0.57 6702 aten::div pnnx_5544 3 1 6696 6659 6658 6703 aten::Int pnnx_5545 1 1 6703 6704 prim::Constant pnnx_5546 0 1 21745 value=8 prim::Constant pnnx_5547 0 1 21746 value=trunc aten::div pnnx_5548 3 1 6698 21745 21746 6705 aten::Int pnnx_5549 1 1 6705 6706 prim::Constant pnnx_5550 0 1 21747 value=8 prim::ListConstruct pnnx_5551 6 1 6694 6704 6665 6706 21747 6702 6707 prim::Constant pnnx_5553 0 1 21748 value=0 prim::Constant pnnx_5554 0 1 21749 value=1 prim::Constant pnnx_5555 0 1 21750 value=3 prim::Constant pnnx_5556 0 1 21751 value=2 prim::ListConstruct pnnx_5557 6 1 21748 21749 21750 21751 6666 6667 6709 Tensor.view Tensor.view_1275 2 1 x0.57 6707 x1.57 $input=x0.57 $shape=6707 #x0.57=(1,48,48,192)f32 #x1.57=(1,6,8,6,8,192)f32 prim::Constant pnnx_5561 0 1 21753 value=8 prim::Constant pnnx_5562 0 1 21754 value=8 prim::ListConstruct pnnx_5563 4 1 6668 21753 21754 6701 6712 torch.permute torch.permute_2636 2 1 x1.57 6709 6710 $input=x1.57 $dims=6709 #x1.57=(1,6,8,6,8,192)f32 #6710=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_83 1 1 6710 6711 memory_format=torch.contiguous_format $input=6710 #6710=(1,6,6,8,8,192)f32 #6711=(1,6,6,8,8,192)f32 prim::Constant pnnx_5565 0 1 21755 value=-1 prim::ListConstruct pnnx_5566 3 1 21755 6669 6684 6714 prim::Constant pnnx_5568 0 1 6716 value=1.767767e-01 prim::Constant pnnx_5569 0 1 6717 value=trunc prim::Constant pnnx_5570 0 1 6718 value=6 prim::Constant pnnx_5571 0 1 6719 value=0 prim::Constant pnnx_5572 0 1 6720 value=1 prim::Constant pnnx_5573 0 1 6721 value=2 prim::Constant pnnx_5574 0 1 6722 value=3 prim::Constant pnnx_5575 0 1 6723 value=6 prim::Constant pnnx_5576 0 1 6724 value=4 prim::Constant pnnx_5577 0 1 6725 value=-2 prim::Constant pnnx_5578 0 1 6726 value=-1 prim::Constant pnnx_5579 0 1 6727 value=64 pnnx.Attribute layers_dfe.4.residual_group.blocks.3.attn 0 1 relative_position_bias_table.57 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.57=(225,6)f32 pnnx.Attribute layers_dfe.4.residual_group.blocks.3.attn 0 1 relative_position_index.57 @relative_position_index=(64,64)i64 #relative_position_index.57=(64,64)i64 
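
blocks.1, blocks.3 and blocks.5 are the shifted variants: the (1,48,48,192) map is rolled by (-4,-4) on dims (1,2) before partitioning (torch.roll_2446 above), and a stored (36,64,64) attn_mask is broadcast-added to the window logits through the view/unsqueeze/add/view chain seen around attn1.27 and attn1.29. A minimal sketch of those two steps, with random stand-ins for the feature map, logits and mask:

import torch

shift, num_heads, N, nW = 4, 6, 64, 36

x = torch.randn(1, 48, 48, 192)                       # x.57 in the graph
shifted = torch.roll(x, shifts=(-shift, -shift), dims=(1, 2))   # torch.roll_2446

attn = torch.randn(nW, num_heads, N, N)               # logits + relative bias (attn0.29)
mask = torch.randn(nW, N, N)                          # attn_mask.29 buffer

# (nW*B, heads, N, N) -> (B, nW, heads, N, N), add (1, nW, 1, N, N), flatten back
attn = attn.view(-1, nW, num_heads, N, N) + mask.unsqueeze(1).unsqueeze(0)
attn = attn.view(-1, num_heads, N, N).softmax(dim=-1) # blocks.*.attn.softmax
print(shifted.shape, attn.shape)
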
Tensor.view Tensor.view_1276 2 1 6711 6712 x_windows.57 $input=6711 $shape=6712 #6711=(1,6,6,8,8,192)f32 #x_windows.57=(36,8,8,192)f32 Tensor.view Tensor.view_1277 2 1 x_windows.57 6714 x2.57 $input=x_windows.57 $shape=6714 #x_windows.57=(36,8,8,192)f32 #x2.57=(36,64,192)f32 aten::size pnnx_5580 2 1 x2.57 6719 6735 #x2.57=(36,64,192)f32 prim::NumToTensor pnnx_5581 1 1 6735 B_.57 aten::Int pnnx_5582 1 1 B_.57 6737 aten::Int pnnx_5583 1 1 B_.57 6738 aten::size pnnx_5584 2 1 x2.57 6720 6739 #x2.57=(36,64,192)f32 prim::NumToTensor pnnx_5585 1 1 6739 N.57 aten::Int pnnx_5586 1 1 N.57 6741 aten::Int pnnx_5587 1 1 N.57 6742 aten::Int pnnx_5588 1 1 N.57 6743 aten::Int pnnx_5589 1 1 N.57 6744 aten::Int pnnx_5590 1 1 N.57 6745 aten::Int pnnx_5591 1 1 N.57 6746 aten::size pnnx_5592 2 1 x2.57 6721 6747 #x2.57=(36,64,192)f32 prim::NumToTensor pnnx_5593 1 1 6747 C.121 aten::Int pnnx_5594 1 1 C.121 6749 nn.Linear layers_dfe.4.residual_group.blocks.3.attn.qkv 1 1 x2.57 6750 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.57=(36,64,192)f32 #6750=(36,64,576)f32 aten::div pnnx_5595 3 1 C.121 6718 6717 6751 aten::Int pnnx_5596 1 1 6751 6752 prim::ListConstruct pnnx_5597 5 1 6738 6746 6722 6723 6752 6753 prim::Constant pnnx_5599 0 1 21756 value=2 prim::Constant pnnx_5600 0 1 21757 value=0 prim::Constant pnnx_5601 0 1 21758 value=3 prim::Constant pnnx_5602 0 1 21759 value=1 prim::ListConstruct pnnx_5603 5 1 21756 21757 21758 21759 6724 6755 Tensor.reshape Tensor.reshape_488 2 1 6750 6753 6754 $input=6750 $shape=6753 #6750=(36,64,576)f32 #6754=(36,64,3,6,32)f32 prim::Constant pnnx_5605 0 1 21760 value=0 prim::Constant pnnx_5606 0 1 21761 value=0 prim::Constant pnnx_5608 0 1 21762 value=0 prim::Constant pnnx_5609 0 1 21763 value=1 prim::Constant pnnx_5611 0 1 21764 value=0 prim::Constant pnnx_5612 0 1 21765 value=2 torch.permute torch.permute_2637 2 1 6754 6755 qkv0.57 $input=6754 $dims=6755 #6754=(36,64,3,6,32)f32 #qkv0.57=(3,36,6,64,32)f32 Tensor.select Tensor.select_731 3 1 qkv0.57 21760 21761 q.57 $input=qkv0.57 $dim=21760 $index=21761 #qkv0.57=(3,36,6,64,32)f32 #q.57=(36,6,64,32)f32 aten::mul pnnx_5614 2 1 q.57 6716 q0.57 #q.57=(36,6,64,32)f32 #q0.57=(36,6,64,32)f32 Tensor.select Tensor.select_732 3 1 qkv0.57 21762 21763 k.57 $input=qkv0.57 $dim=21762 $index=21763 #qkv0.57=(3,36,6,64,32)f32 #k.57=(36,6,64,32)f32 prim::Constant pnnx_5617 0 1 21766 value=-1 prim::ListConstruct pnnx_5618 1 1 21766 6763 Tensor.view Tensor.view_1278 2 1 relative_position_index.57 6763 6764 $input=relative_position_index.57 $shape=6763 #relative_position_index.57=(64,64)i64 #6764=(4096)i64 prim::ListConstruct pnnx_5620 1 1 6764 6765 #6764=(4096)i64 prim::Constant pnnx_5622 0 1 21767 value=64 prim::Constant pnnx_5623 0 1 21768 value=-1 prim::ListConstruct pnnx_5624 3 1 6727 21767 21768 6767 Tensor.index Tensor.index_353 2 1 relative_position_bias_table.57 6765 6766 $input=relative_position_bias_table.57 $expr=6765 #relative_position_bias_table.57=(225,6)f32 #6766=(4096,6)f32 prim::Constant pnnx_5626 0 1 21769 value=2 prim::Constant pnnx_5627 0 1 21770 value=0 prim::Constant pnnx_5628 0 1 21771 value=1 prim::ListConstruct pnnx_5629 3 1 21769 21770 21771 6769 Tensor.view Tensor.view_1279 2 1 6766 6767 relative_position_bias.57 $input=6766 $shape=6767 #6766=(4096,6)f32 #relative_position_bias.57=(64,64,6)f32 prim::Constant pnnx_5633 0 1 21773 value=0 torch.permute torch.permute_2638 2 1 relative_position_bias.57 6769 6770 $input=relative_position_bias.57 $dims=6769 
#relative_position_bias.57=(64,64,6)f32 #6770=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_84 1 1 6770 relative_position_bias0.57 memory_format=torch.contiguous_format $input=6770 #6770=(6,64,64)f32 #relative_position_bias0.57=(6,64,64)f32 prim::Constant pnnx_5635 0 1 21774 value=1 torch.transpose torch.transpose_3023 3 1 k.57 6725 6726 6761 $input=k.57 $dim0=6725 $dim1=6726 #k.57=(36,6,64,32)f32 #6761=(36,6,32,64)f32 torch.matmul torch.matmul_2258 2 1 q0.57 6761 attn.115 $input=q0.57 $other=6761 #q0.57=(36,6,64,32)f32 #6761=(36,6,32,64)f32 #attn.115=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3271 2 1 relative_position_bias0.57 21773 6772 $input=relative_position_bias0.57 $dim=21773 #relative_position_bias0.57=(6,64,64)f32 #6772=(1,6,64,64)f32 aten::add pnnx_5636 3 1 attn.115 6772 21774 attn0.29 #attn.115=(36,6,64,64)f32 #6772=(1,6,64,64)f32 #attn0.29=(36,6,64,64)f32 prim::Constant pnnx_5637 0 1 21775 value=0 aten::size pnnx_5638 2 1 attn_mask.29 21775 6774 #attn_mask.29=(36,64,64)f32 prim::NumToTensor pnnx_5639 1 1 6774 other.29 aten::Int pnnx_5640 1 1 other.29 6776 prim::Constant pnnx_5641 0 1 21776 value=trunc aten::div pnnx_5642 3 1 B_.57 other.29 21776 6777 aten::Int pnnx_5643 1 1 6777 6778 prim::Constant pnnx_5644 0 1 21777 value=6 prim::ListConstruct pnnx_5645 5 1 6778 6776 21777 6745 6744 6779 prim::Constant pnnx_5647 0 1 21778 value=1 prim::Constant pnnx_5649 0 1 21779 value=0 prim::Constant pnnx_5651 0 1 21780 value=1 Tensor.view Tensor.view_1280 2 1 attn0.29 6779 6780 $input=attn0.29 $shape=6779 #attn0.29=(36,6,64,64)f32 #6780=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3272 2 1 attn_mask.29 21778 6781 $input=attn_mask.29 $dim=21778 #attn_mask.29=(36,64,64)f32 #6781=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3273 2 1 6781 21779 6782 $input=6781 $dim=21779 #6781=(36,1,64,64)f32 #6782=(1,36,1,64,64)f32 aten::add pnnx_5652 3 1 6780 6782 21780 attn1.29 #6780=(1,36,6,64,64)f32 #6782=(1,36,1,64,64)f32 #attn1.29=(1,36,6,64,64)f32 prim::Constant pnnx_5653 0 1 21781 value=-1 prim::Constant pnnx_5654 0 1 21782 value=6 prim::ListConstruct pnnx_5655 4 1 21781 21782 6743 6742 6784 Tensor.view Tensor.view_1281 2 1 attn1.29 6784 input.127 $input=attn1.29 $shape=6784 #attn1.29=(1,36,6,64,64)f32 #input.127=(36,6,64,64)f32 nn.Softmax layers_dfe.4.residual_group.blocks.3.attn.softmax 1 1 input.127 6786 dim=-1 #input.127=(36,6,64,64)f32 #6786=(36,6,64,64)f32 nn.Dropout layers_dfe.4.residual_group.blocks.3.attn.attn_drop 1 1 6786 6787 #6786=(36,6,64,64)f32 #6787=(36,6,64,64)f32 Tensor.select Tensor.select_733 3 1 qkv0.57 21764 21765 v.57 $input=qkv0.57 $dim=21764 $index=21765 #qkv0.57=(3,36,6,64,32)f32 #v.57=(36,6,64,32)f32 prim::Constant pnnx_5658 0 1 21783 value=1 prim::Constant pnnx_5659 0 1 21784 value=2 torch.matmul torch.matmul_2259 2 1 6787 v.57 6788 $input=6787 $other=v.57 #6787=(36,6,64,64)f32 #v.57=(36,6,64,32)f32 #6788=(36,6,64,32)f32 prim::ListConstruct pnnx_5661 3 1 6737 6741 6749 6790 torch.transpose torch.transpose_3024 3 1 6788 21783 21784 6789 $input=6788 $dim0=21783 $dim1=21784 #6788=(36,6,64,32)f32 #6789=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_489 2 1 6789 6790 input0.59 $input=6789 $shape=6790 #6789=(36,64,6,32)f32 #input0.59=(36,64,192)f32 nn.Linear layers_dfe.4.residual_group.blocks.3.attn.proj 1 1 input0.59 6792 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.59=(36,64,192)f32 #6792=(36,64,192)f32 nn.Dropout layers_dfe.4.residual_group.blocks.3.attn.proj_drop 1 1 6792 6793 #6792=(36,64,192)f32 
#6793=(36,64,192)f32 prim::Constant pnnx_5663 0 1 21785 value=-1 prim::Constant pnnx_5664 0 1 21786 value=8 prim::Constant pnnx_5665 0 1 21787 value=8 prim::ListConstruct pnnx_5666 4 1 21785 21786 21787 6683 6794 prim::Constant pnnx_5668 0 1 21788 value=8 prim::Constant pnnx_5669 0 1 21789 value=trunc aten::div pnnx_5670 3 1 H.1 21788 21789 6796 aten::Int pnnx_5671 1 1 6796 6797 prim::Constant pnnx_5672 0 1 21790 value=8 prim::Constant pnnx_5673 0 1 21791 value=trunc aten::div pnnx_5674 3 1 W.1 21790 21791 6798 aten::Int pnnx_5675 1 1 6798 6799 prim::Constant pnnx_5676 0 1 21792 value=1 prim::Constant pnnx_5677 0 1 21793 value=8 prim::Constant pnnx_5678 0 1 21794 value=8 prim::Constant pnnx_5679 0 1 21795 value=-1 prim::ListConstruct pnnx_5680 6 1 21792 6797 6799 21793 21794 21795 6800 prim::Constant pnnx_5682 0 1 21796 value=0 prim::Constant pnnx_5683 0 1 21797 value=1 prim::Constant pnnx_5684 0 1 21798 value=3 prim::Constant pnnx_5685 0 1 21799 value=2 prim::Constant pnnx_5686 0 1 21800 value=4 prim::Constant pnnx_5687 0 1 21801 value=5 prim::ListConstruct pnnx_5688 6 1 21796 21797 21798 21799 21800 21801 6802 Tensor.view Tensor.view_1282 2 1 6793 6794 windows.57 $input=6793 $shape=6794 #6793=(36,64,192)f32 #windows.57=(36,8,8,192)f32 Tensor.view Tensor.view_1283 2 1 windows.57 6800 x3.57 $input=windows.57 $shape=6800 #windows.57=(36,8,8,192)f32 #x3.57=(1,6,6,8,8,192)f32 prim::Constant pnnx_5692 0 1 21803 value=1 prim::Constant pnnx_5693 0 1 21804 value=-1 prim::ListConstruct pnnx_5694 4 1 21803 220 460 21804 6805 torch.permute torch.permute_2639 2 1 x3.57 6802 6803 $input=x3.57 $dims=6802 #x3.57=(1,6,6,8,8,192)f32 #6803=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_85 1 1 6803 6804 memory_format=torch.contiguous_format $input=6803 #6803=(1,6,8,6,8,192)f32 #6804=(1,6,8,6,8,192)f32 prim::Constant pnnx_5696 0 1 21805 value=4 prim::Constant pnnx_5697 0 1 21806 value=4 prim::ListConstruct pnnx_5698 2 1 21805 21806 6807 prim::Constant pnnx_5699 0 1 21807 value=1 prim::Constant pnnx_5700 0 1 21808 value=2 prim::ListConstruct pnnx_5701 2 1 21807 21808 6808 Tensor.view Tensor.view_1284 2 1 6804 6805 shifted_x.29 $input=6804 $shape=6805 #6804=(1,6,8,6,8,192)f32 #shifted_x.29=(1,48,48,192)f32 aten::mul pnnx_5703 2 1 H.1 W.1 6810 aten::Int pnnx_5704 1 1 6810 6811 prim::ListConstruct pnnx_5705 3 1 6678 6811 6682 6812 prim::Constant pnnx_5707 0 1 6814 value=None prim::Constant pnnx_5708 0 1 21809 value=1 torch.roll torch.roll_2447 3 1 shifted_x.29 6807 6808 x4.57 $input=shifted_x.29 $shifts=6807 $dims=6808 #shifted_x.29=(1,48,48,192)f32 #x4.57=(1,48,48,192)f32 Tensor.view Tensor.view_1285 2 1 x4.57 6812 x5.29 $input=x4.57 $shape=6812 #x4.57=(1,48,48,192)f32 #x5.29=(1,2304,192)f32 aten::add pnnx_5709 3 1 6657 x5.29 21809 input.129 #6657=(1,2304,192)f32 #x5.29=(1,2304,192)f32 #input.129=(1,2304,192)f32 nn.LayerNorm layers_dfe.4.residual_group.blocks.3.norm2 1 1 input.129 6816 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.129=(1,2304,192)f32 #6816=(1,2304,192)f32 nn.Linear layers_dfe.4.residual_group.blocks.3.mlp.fc1 1 1 6816 6821 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #6816=(1,2304,192)f32 #6821=(1,2304,384)f32 nn.GELU layers_dfe.4.residual_group.blocks.3.mlp.act 1 1 6821 6822 #6821=(1,2304,384)f32 #6822=(1,2304,384)f32 nn.Dropout layers_dfe.4.residual_group.blocks.3.mlp.drop 1 1 6822 6823 #6822=(1,2304,384)f32 #6823=(1,2304,384)f32 nn.Linear layers_dfe.4.residual_group.blocks.3.mlp.fc2 1 1 6823 
6824 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #6823=(1,2304,384)f32 #6824=(1,2304,192)f32 nn.Dropout layers_dfe.4.residual_group.blocks.3.mlp.drop 1 1 6824 6825 #6824=(1,2304,192)f32 #6825=(1,2304,192)f32 prim::Constant pnnx_5710 0 1 6826 value=None prim::Constant pnnx_5711 0 1 21810 value=1 aten::add pnnx_5712 3 1 input.129 6825 21810 6827 #input.129=(1,2304,192)f32 #6825=(1,2304,192)f32 #6827=(1,2304,192)f32 prim::Constant pnnx_5713 0 1 6828 value=trunc prim::Constant pnnx_5714 0 1 6829 value=8 prim::Constant pnnx_5715 0 1 6830 value=0 prim::Constant pnnx_5716 0 1 6831 value=2 prim::Constant pnnx_5717 0 1 6832 value=1 prim::Constant pnnx_5718 0 1 6833 value=3 prim::Constant pnnx_5719 0 1 6834 value=8 prim::Constant pnnx_5720 0 1 6835 value=4 prim::Constant pnnx_5721 0 1 6836 value=5 prim::Constant pnnx_5722 0 1 6837 value=-1 prim::Constant pnnx_5723 0 1 6838 value=64 aten::size pnnx_5724 2 1 6827 6830 6844 #6827=(1,2304,192)f32 prim::NumToTensor pnnx_5725 1 1 6844 B.69 aten::Int pnnx_5726 1 1 B.69 6846 aten::Int pnnx_5727 1 1 B.69 6847 aten::size pnnx_5728 2 1 6827 6831 6848 #6827=(1,2304,192)f32 prim::NumToTensor pnnx_5729 1 1 6848 C.123 aten::Int pnnx_5730 1 1 C.123 6850 aten::Int pnnx_5731 1 1 C.123 6851 aten::Int pnnx_5732 1 1 C.123 6852 aten::Int pnnx_5733 1 1 C.123 6853 nn.LayerNorm layers_dfe.4.residual_group.blocks.4.norm1 1 1 6827 6854 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #6827=(1,2304,192)f32 #6854=(1,2304,192)f32 prim::ListConstruct pnnx_5734 4 1 6847 217 457 6853 6855 prim::Constant pnnx_5736 0 1 21811 value=0 Tensor.view Tensor.view_1286 2 1 6854 6855 x.59 $input=6854 $shape=6855 #6854=(1,2304,192)f32 #x.59=(1,48,48,192)f32 aten::size pnnx_5737 2 1 x.59 21811 6857 #x.59=(1,48,48,192)f32 prim::NumToTensor pnnx_5738 1 1 6857 B0.59 aten::Int pnnx_5739 1 1 B0.59 6859 aten::size pnnx_5740 2 1 x.59 6832 6860 #x.59=(1,48,48,192)f32 prim::NumToTensor pnnx_5741 1 1 6860 6861 prim::Constant pnnx_5742 0 1 21812 value=2 aten::size pnnx_5743 2 1 x.59 21812 6862 #x.59=(1,48,48,192)f32 prim::NumToTensor pnnx_5744 1 1 6862 6863 aten::size pnnx_5745 2 1 x.59 6833 6864 #x.59=(1,48,48,192)f32 prim::NumToTensor pnnx_5746 1 1 6864 C0.59 aten::Int pnnx_5747 1 1 C0.59 6866 aten::Int pnnx_5748 1 1 C0.59 6867 aten::div pnnx_5749 3 1 6861 6829 6828 6868 aten::Int pnnx_5750 1 1 6868 6869 prim::Constant pnnx_5751 0 1 21813 value=8 prim::Constant pnnx_5752 0 1 21814 value=trunc aten::div pnnx_5753 3 1 6863 21813 21814 6870 aten::Int pnnx_5754 1 1 6870 6871 prim::Constant pnnx_5755 0 1 21815 value=8 prim::ListConstruct pnnx_5756 6 1 6859 6869 6834 6871 21815 6867 6872 prim::Constant pnnx_5758 0 1 21816 value=0 prim::Constant pnnx_5759 0 1 21817 value=1 prim::Constant pnnx_5760 0 1 21818 value=3 prim::Constant pnnx_5761 0 1 21819 value=2 prim::ListConstruct pnnx_5762 6 1 21816 21817 21818 21819 6835 6836 6874 Tensor.view Tensor.view_1287 2 1 x.59 6872 x0.59 $input=x.59 $shape=6872 #x.59=(1,48,48,192)f32 #x0.59=(1,6,8,6,8,192)f32 prim::Constant pnnx_5766 0 1 21821 value=8 prim::Constant pnnx_5767 0 1 21822 value=8 prim::ListConstruct pnnx_5768 4 1 6837 21821 21822 6866 6877 torch.permute torch.permute_2640 2 1 x0.59 6874 6875 $input=x0.59 $dims=6874 #x0.59=(1,6,8,6,8,192)f32 #6875=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_86 1 1 6875 6876 memory_format=torch.contiguous_format $input=6875 #6875=(1,6,6,8,8,192)f32 #6876=(1,6,6,8,8,192)f32 prim::Constant pnnx_5770 0 1 21823 value=-1 
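
The mirror of the partition step appears just above: Tensor.view_1282/1283, torch.permute_2639 and Tensor.view_1284 stitch the 36 attended windows back into the (1,48,48,192) map, shifted blocks roll it back by (4,4) (torch.roll_2447), and each block then applies norm2 plus a 192->384->192 MLP with GELU and a residual add. A minimal sketch; window_reverse is an assumed helper name, the Dropout layers in the dump are omitted and the weights are random:

import torch
import torch.nn as nn

def window_reverse(windows: torch.Tensor, window_size: int, H: int, W: int) -> torch.Tensor:
    # (num_windows*B, window_size, window_size, C) -> (B, H, W, C)
    B = windows.shape[0] // ((H // window_size) * (W // window_size))
    x = windows.view(B, H // window_size, W // window_size, window_size, window_size, -1)
    return x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, H, W, -1)

H = W = 48
C = 192
windows = torch.randn(36, 8, 8, C)                    # windows.57 in the graph
x = window_reverse(windows, 8, H, W)                  # (1, 48, 48, 192)
x = torch.roll(x, shifts=(4, 4), dims=(1, 2))         # undo the -4 shift, torch.roll_2447
x = x.view(1, H * W, C)                               # (1, 2304, 192)

shortcut = torch.randn(1, H * W, C)                   # stand-in for the block input
norm2 = nn.LayerNorm(C)                               # blocks.*.norm2
mlp = nn.Sequential(nn.Linear(C, 2 * C), nn.GELU(), nn.Linear(2 * C, C))  # mlp.fc1/act/fc2
x = shortcut + x                                      # first residual add
x = x + mlp(norm2(x))                                 # second residual add
print(x.shape)                                        # torch.Size([1, 2304, 192])
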
prim::ListConstruct pnnx_5771 3 1 21823 6838 6852 6879 prim::Constant pnnx_5773 0 1 6881 value=1.767767e-01 prim::Constant pnnx_5774 0 1 6882 value=trunc prim::Constant pnnx_5775 0 1 6883 value=6 prim::Constant pnnx_5776 0 1 6884 value=0 prim::Constant pnnx_5777 0 1 6885 value=1 prim::Constant pnnx_5778 0 1 6886 value=2 prim::Constant pnnx_5779 0 1 6887 value=3 prim::Constant pnnx_5780 0 1 6888 value=6 prim::Constant pnnx_5781 0 1 6889 value=4 prim::Constant pnnx_5782 0 1 6890 value=-2 prim::Constant pnnx_5783 0 1 6891 value=-1 prim::Constant pnnx_5784 0 1 6892 value=64 pnnx.Attribute layers_dfe.4.residual_group.blocks.4.attn 0 1 relative_position_bias_table.59 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.59=(225,6)f32 pnnx.Attribute layers_dfe.4.residual_group.blocks.4.attn 0 1 relative_position_index.59 @relative_position_index=(64,64)i64 #relative_position_index.59=(64,64)i64 Tensor.view Tensor.view_1288 2 1 6876 6877 x_windows.59 $input=6876 $shape=6877 #6876=(1,6,6,8,8,192)f32 #x_windows.59=(36,8,8,192)f32 Tensor.view Tensor.view_1289 2 1 x_windows.59 6879 x1.59 $input=x_windows.59 $shape=6879 #x_windows.59=(36,8,8,192)f32 #x1.59=(36,64,192)f32 aten::size pnnx_5785 2 1 x1.59 6884 6900 #x1.59=(36,64,192)f32 prim::NumToTensor pnnx_5786 1 1 6900 B_.59 aten::Int pnnx_5787 1 1 B_.59 6902 aten::Int pnnx_5788 1 1 B_.59 6903 aten::size pnnx_5789 2 1 x1.59 6885 6904 #x1.59=(36,64,192)f32 prim::NumToTensor pnnx_5790 1 1 6904 N.59 aten::Int pnnx_5791 1 1 N.59 6906 aten::Int pnnx_5792 1 1 N.59 6907 aten::size pnnx_5793 2 1 x1.59 6886 6908 #x1.59=(36,64,192)f32 prim::NumToTensor pnnx_5794 1 1 6908 C.125 aten::Int pnnx_5795 1 1 C.125 6910 nn.Linear layers_dfe.4.residual_group.blocks.4.attn.qkv 1 1 x1.59 6911 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.59=(36,64,192)f32 #6911=(36,64,576)f32 aten::div pnnx_5796 3 1 C.125 6883 6882 6912 aten::Int pnnx_5797 1 1 6912 6913 prim::ListConstruct pnnx_5798 5 1 6903 6907 6887 6888 6913 6914 prim::Constant pnnx_5800 0 1 21824 value=2 prim::Constant pnnx_5801 0 1 21825 value=0 prim::Constant pnnx_5802 0 1 21826 value=3 prim::Constant pnnx_5803 0 1 21827 value=1 prim::ListConstruct pnnx_5804 5 1 21824 21825 21826 21827 6889 6916 Tensor.reshape Tensor.reshape_490 2 1 6911 6914 6915 $input=6911 $shape=6914 #6911=(36,64,576)f32 #6915=(36,64,3,6,32)f32 prim::Constant pnnx_5806 0 1 21828 value=0 prim::Constant pnnx_5807 0 1 21829 value=0 prim::Constant pnnx_5809 0 1 21830 value=0 prim::Constant pnnx_5810 0 1 21831 value=1 prim::Constant pnnx_5812 0 1 21832 value=0 prim::Constant pnnx_5813 0 1 21833 value=2 torch.permute torch.permute_2641 2 1 6915 6916 qkv0.59 $input=6915 $dims=6916 #6915=(36,64,3,6,32)f32 #qkv0.59=(3,36,6,64,32)f32 Tensor.select Tensor.select_734 3 1 qkv0.59 21828 21829 q.59 $input=qkv0.59 $dim=21828 $index=21829 #qkv0.59=(3,36,6,64,32)f32 #q.59=(36,6,64,32)f32 aten::mul pnnx_5815 2 1 q.59 6881 q0.59 #q.59=(36,6,64,32)f32 #q0.59=(36,6,64,32)f32 Tensor.select Tensor.select_735 3 1 qkv0.59 21830 21831 k.59 $input=qkv0.59 $dim=21830 $index=21831 #qkv0.59=(3,36,6,64,32)f32 #k.59=(36,6,64,32)f32 prim::Constant pnnx_5818 0 1 21834 value=-1 prim::ListConstruct pnnx_5819 1 1 21834 6924 Tensor.view Tensor.view_1290 2 1 relative_position_index.59 6924 6925 $input=relative_position_index.59 $shape=6924 #relative_position_index.59=(64,64)i64 #6925=(4096)i64 prim::ListConstruct pnnx_5821 1 1 6925 6926 #6925=(4096)i64 prim::Constant pnnx_5823 0 1 21835 value=64 prim::Constant pnnx_5824 0 1 21836 
value=-1 prim::ListConstruct pnnx_5825 3 1 6892 21835 21836 6928 Tensor.index Tensor.index_354 2 1 relative_position_bias_table.59 6926 6927 $input=relative_position_bias_table.59 $expr=6926 #relative_position_bias_table.59=(225,6)f32 #6927=(4096,6)f32 prim::Constant pnnx_5827 0 1 21837 value=2 prim::Constant pnnx_5828 0 1 21838 value=0 prim::Constant pnnx_5829 0 1 21839 value=1 prim::ListConstruct pnnx_5830 3 1 21837 21838 21839 6930 Tensor.view Tensor.view_1291 2 1 6927 6928 relative_position_bias.59 $input=6927 $shape=6928 #6927=(4096,6)f32 #relative_position_bias.59=(64,64,6)f32 prim::Constant pnnx_5834 0 1 21841 value=0 torch.permute torch.permute_2642 2 1 relative_position_bias.59 6930 6931 $input=relative_position_bias.59 $dims=6930 #relative_position_bias.59=(64,64,6)f32 #6931=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_87 1 1 6931 relative_position_bias0.59 memory_format=torch.contiguous_format $input=6931 #6931=(6,64,64)f32 #relative_position_bias0.59=(6,64,64)f32 prim::Constant pnnx_5836 0 1 21842 value=1 torch.transpose torch.transpose_3025 3 1 k.59 6890 6891 6922 $input=k.59 $dim0=6890 $dim1=6891 #k.59=(36,6,64,32)f32 #6922=(36,6,32,64)f32 torch.matmul torch.matmul_2260 2 1 q0.59 6922 attn.119 $input=q0.59 $other=6922 #q0.59=(36,6,64,32)f32 #6922=(36,6,32,64)f32 #attn.119=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3274 2 1 relative_position_bias0.59 21841 6933 $input=relative_position_bias0.59 $dim=21841 #relative_position_bias0.59=(6,64,64)f32 #6933=(1,6,64,64)f32 aten::add pnnx_5837 3 1 attn.119 6933 21842 input.131 #attn.119=(36,6,64,64)f32 #6933=(1,6,64,64)f32 #input.131=(36,6,64,64)f32 nn.Softmax layers_dfe.4.residual_group.blocks.4.attn.softmax 1 1 input.131 6935 dim=-1 #input.131=(36,6,64,64)f32 #6935=(36,6,64,64)f32 nn.Dropout layers_dfe.4.residual_group.blocks.4.attn.attn_drop 1 1 6935 6936 #6935=(36,6,64,64)f32 #6936=(36,6,64,64)f32 Tensor.select Tensor.select_736 3 1 qkv0.59 21832 21833 v.59 $input=qkv0.59 $dim=21832 $index=21833 #qkv0.59=(3,36,6,64,32)f32 #v.59=(36,6,64,32)f32 prim::Constant pnnx_5839 0 1 21843 value=1 prim::Constant pnnx_5840 0 1 21844 value=2 torch.matmul torch.matmul_2261 2 1 6936 v.59 6937 $input=6936 $other=v.59 #6936=(36,6,64,64)f32 #v.59=(36,6,64,32)f32 #6937=(36,6,64,32)f32 prim::ListConstruct pnnx_5842 3 1 6902 6906 6910 6939 torch.transpose torch.transpose_3026 3 1 6937 21843 21844 6938 $input=6937 $dim0=21843 $dim1=21844 #6937=(36,6,64,32)f32 #6938=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_491 2 1 6938 6939 input0.61 $input=6938 $shape=6939 #6938=(36,64,6,32)f32 #input0.61=(36,64,192)f32 nn.Linear layers_dfe.4.residual_group.blocks.4.attn.proj 1 1 input0.61 6941 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.61=(36,64,192)f32 #6941=(36,64,192)f32 nn.Dropout layers_dfe.4.residual_group.blocks.4.attn.proj_drop 1 1 6941 6942 #6941=(36,64,192)f32 #6942=(36,64,192)f32 prim::Constant pnnx_5844 0 1 21845 value=-1 prim::Constant pnnx_5845 0 1 21846 value=8 prim::Constant pnnx_5846 0 1 21847 value=8 prim::ListConstruct pnnx_5847 4 1 21845 21846 21847 6851 6943 prim::Constant pnnx_5849 0 1 21848 value=8 prim::Constant pnnx_5850 0 1 21849 value=trunc aten::div pnnx_5851 3 1 H.1 21848 21849 6945 aten::Int pnnx_5852 1 1 6945 6946 prim::Constant pnnx_5853 0 1 21850 value=8 prim::Constant pnnx_5854 0 1 21851 value=trunc aten::div pnnx_5855 3 1 W.1 21850 21851 6947 aten::Int pnnx_5856 1 1 6947 6948 prim::Constant pnnx_5857 0 1 21852 value=1 prim::Constant pnnx_5858 0 1 21853 value=8 prim::Constant 
pnnx_5859 0 1 21854 value=8 prim::Constant pnnx_5860 0 1 21855 value=-1 prim::ListConstruct pnnx_5861 6 1 21852 6946 6948 21853 21854 21855 6949 prim::Constant pnnx_5863 0 1 21856 value=0 prim::Constant pnnx_5864 0 1 21857 value=1 prim::Constant pnnx_5865 0 1 21858 value=3 prim::Constant pnnx_5866 0 1 21859 value=2 prim::Constant pnnx_5867 0 1 21860 value=4 prim::Constant pnnx_5868 0 1 21861 value=5 prim::ListConstruct pnnx_5869 6 1 21856 21857 21858 21859 21860 21861 6951 Tensor.view Tensor.view_1292 2 1 6942 6943 windows.59 $input=6942 $shape=6943 #6942=(36,64,192)f32 #windows.59=(36,8,8,192)f32 Tensor.view Tensor.view_1293 2 1 windows.59 6949 x2.59 $input=windows.59 $shape=6949 #windows.59=(36,8,8,192)f32 #x2.59=(1,6,6,8,8,192)f32 prim::Constant pnnx_5873 0 1 21863 value=1 prim::Constant pnnx_5874 0 1 21864 value=-1 prim::ListConstruct pnnx_5875 4 1 21863 214 454 21864 6954 torch.permute torch.permute_2643 2 1 x2.59 6951 6952 $input=x2.59 $dims=6951 #x2.59=(1,6,6,8,8,192)f32 #6952=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_88 1 1 6952 6953 memory_format=torch.contiguous_format $input=6952 #6952=(1,6,8,6,8,192)f32 #6953=(1,6,8,6,8,192)f32 aten::mul pnnx_5877 2 1 H.1 W.1 6956 aten::Int pnnx_5878 1 1 6956 6957 prim::ListConstruct pnnx_5879 3 1 6846 6957 6850 6958 prim::Constant pnnx_5881 0 1 6960 value=None prim::Constant pnnx_5882 0 1 21865 value=1 Tensor.view Tensor.view_1294 2 1 6953 6954 x3.59 $input=6953 $shape=6954 #6953=(1,6,8,6,8,192)f32 #x3.59=(1,48,48,192)f32 Tensor.view Tensor.view_1295 2 1 x3.59 6958 x4.59 $input=x3.59 $shape=6958 #x3.59=(1,48,48,192)f32 #x4.59=(1,2304,192)f32 aten::add pnnx_5883 3 1 6827 x4.59 21865 input.133 #6827=(1,2304,192)f32 #x4.59=(1,2304,192)f32 #input.133=(1,2304,192)f32 nn.LayerNorm layers_dfe.4.residual_group.blocks.4.norm2 1 1 input.133 6962 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.133=(1,2304,192)f32 #6962=(1,2304,192)f32 nn.Linear layers_dfe.4.residual_group.blocks.4.mlp.fc1 1 1 6962 6967 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #6962=(1,2304,192)f32 #6967=(1,2304,384)f32 nn.GELU layers_dfe.4.residual_group.blocks.4.mlp.act 1 1 6967 6968 #6967=(1,2304,384)f32 #6968=(1,2304,384)f32 nn.Dropout layers_dfe.4.residual_group.blocks.4.mlp.drop 1 1 6968 6969 #6968=(1,2304,384)f32 #6969=(1,2304,384)f32 nn.Linear layers_dfe.4.residual_group.blocks.4.mlp.fc2 1 1 6969 6970 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #6969=(1,2304,384)f32 #6970=(1,2304,192)f32 nn.Dropout layers_dfe.4.residual_group.blocks.4.mlp.drop 1 1 6970 6971 #6970=(1,2304,192)f32 #6971=(1,2304,192)f32 prim::Constant pnnx_5884 0 1 6972 value=None prim::Constant pnnx_5885 0 1 21866 value=1 aten::add pnnx_5886 3 1 input.133 6971 21866 6973 #input.133=(1,2304,192)f32 #6971=(1,2304,192)f32 #6973=(1,2304,192)f32 prim::Constant pnnx_5887 0 1 6974 value=trunc prim::Constant pnnx_5888 0 1 6975 value=8 prim::Constant pnnx_5889 0 1 6976 value=0 prim::Constant pnnx_5890 0 1 6977 value=2 prim::Constant pnnx_5891 0 1 6978 value=-4 prim::Constant pnnx_5892 0 1 6979 value=1 prim::Constant pnnx_5893 0 1 6980 value=3 prim::Constant pnnx_5894 0 1 6981 value=8 prim::Constant pnnx_5895 0 1 6982 value=4 prim::Constant pnnx_5896 0 1 6983 value=5 prim::Constant pnnx_5897 0 1 6984 value=-1 prim::Constant pnnx_5898 0 1 6985 value=64 pnnx.Attribute layers_dfe.4.residual_group.blocks.5 0 1 attn_mask.31 @attn_mask=(36,64,64)f32 #attn_mask.31=(36,64,64)f32 aten::size 
pnnx_5899 2 1 6973 6976 6992 #6973=(1,2304,192)f32 prim::NumToTensor pnnx_5900 1 1 6992 B.71 aten::Int pnnx_5901 1 1 B.71 6994 aten::Int pnnx_5902 1 1 B.71 6995 aten::size pnnx_5903 2 1 6973 6977 6996 #6973=(1,2304,192)f32 prim::NumToTensor pnnx_5904 1 1 6996 C.127 aten::Int pnnx_5905 1 1 C.127 6998 aten::Int pnnx_5906 1 1 C.127 6999 aten::Int pnnx_5907 1 1 C.127 7000 aten::Int pnnx_5908 1 1 C.127 7001 nn.LayerNorm layers_dfe.4.residual_group.blocks.5.norm1 1 1 6973 7002 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #6973=(1,2304,192)f32 #7002=(1,2304,192)f32 prim::ListConstruct pnnx_5909 4 1 6995 211 451 7001 7003 prim::Constant pnnx_5911 0 1 21867 value=-4 prim::ListConstruct pnnx_5912 2 1 6978 21867 7005 prim::Constant pnnx_5913 0 1 21868 value=2 prim::ListConstruct pnnx_5914 2 1 6979 21868 7006 Tensor.view Tensor.view_1296 2 1 7002 7003 x.61 $input=7002 $shape=7003 #7002=(1,2304,192)f32 #x.61=(1,48,48,192)f32 prim::Constant pnnx_5916 0 1 21869 value=0 torch.roll torch.roll_2448 3 1 x.61 7005 7006 x0.61 $input=x.61 $shifts=7005 $dims=7006 #x.61=(1,48,48,192)f32 #x0.61=(1,48,48,192)f32 aten::size pnnx_5917 2 1 x0.61 21869 7008 #x0.61=(1,48,48,192)f32 prim::NumToTensor pnnx_5918 1 1 7008 B0.61 aten::Int pnnx_5919 1 1 B0.61 7010 prim::Constant pnnx_5920 0 1 21870 value=1 aten::size pnnx_5921 2 1 x0.61 21870 7011 #x0.61=(1,48,48,192)f32 prim::NumToTensor pnnx_5922 1 1 7011 7012 prim::Constant pnnx_5923 0 1 21871 value=2 aten::size pnnx_5924 2 1 x0.61 21871 7013 #x0.61=(1,48,48,192)f32 prim::NumToTensor pnnx_5925 1 1 7013 7014 aten::size pnnx_5926 2 1 x0.61 6980 7015 #x0.61=(1,48,48,192)f32 prim::NumToTensor pnnx_5927 1 1 7015 C0.61 aten::Int pnnx_5928 1 1 C0.61 7017 aten::Int pnnx_5929 1 1 C0.61 7018 aten::div pnnx_5930 3 1 7012 6975 6974 7019 aten::Int pnnx_5931 1 1 7019 7020 prim::Constant pnnx_5932 0 1 21872 value=8 prim::Constant pnnx_5933 0 1 21873 value=trunc aten::div pnnx_5934 3 1 7014 21872 21873 7021 aten::Int pnnx_5935 1 1 7021 7022 prim::Constant pnnx_5936 0 1 21874 value=8 prim::ListConstruct pnnx_5937 6 1 7010 7020 6981 7022 21874 7018 7023 prim::Constant pnnx_5939 0 1 21875 value=0 prim::Constant pnnx_5940 0 1 21876 value=1 prim::Constant pnnx_5941 0 1 21877 value=3 prim::Constant pnnx_5942 0 1 21878 value=2 prim::ListConstruct pnnx_5943 6 1 21875 21876 21877 21878 6982 6983 7025 Tensor.view Tensor.view_1297 2 1 x0.61 7023 x1.61 $input=x0.61 $shape=7023 #x0.61=(1,48,48,192)f32 #x1.61=(1,6,8,6,8,192)f32 prim::Constant pnnx_5947 0 1 21880 value=8 prim::Constant pnnx_5948 0 1 21881 value=8 prim::ListConstruct pnnx_5949 4 1 6984 21880 21881 7017 7028 torch.permute torch.permute_2644 2 1 x1.61 7025 7026 $input=x1.61 $dims=7025 #x1.61=(1,6,8,6,8,192)f32 #7026=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_89 1 1 7026 7027 memory_format=torch.contiguous_format $input=7026 #7026=(1,6,6,8,8,192)f32 #7027=(1,6,6,8,8,192)f32 prim::Constant pnnx_5951 0 1 21882 value=-1 prim::ListConstruct pnnx_5952 3 1 21882 6985 7000 7030 prim::Constant pnnx_5954 0 1 7032 value=1.767767e-01 prim::Constant pnnx_5955 0 1 7033 value=trunc prim::Constant pnnx_5956 0 1 7034 value=6 prim::Constant pnnx_5957 0 1 7035 value=0 prim::Constant pnnx_5958 0 1 7036 value=1 prim::Constant pnnx_5959 0 1 7037 value=2 prim::Constant pnnx_5960 0 1 7038 value=3 prim::Constant pnnx_5961 0 1 7039 value=6 prim::Constant pnnx_5962 0 1 7040 value=4 prim::Constant pnnx_5963 0 1 7041 value=-2 prim::Constant pnnx_5964 0 1 7042 value=-1 prim::Constant pnnx_5965 0 1 7043 
value=64 pnnx.Attribute layers_dfe.4.residual_group.blocks.5.attn 0 1 relative_position_bias_table.61 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.61=(225,6)f32 pnnx.Attribute layers_dfe.4.residual_group.blocks.5.attn 0 1 relative_position_index.61 @relative_position_index=(64,64)i64 #relative_position_index.61=(64,64)i64 Tensor.view Tensor.view_1298 2 1 7027 7028 x_windows.61 $input=7027 $shape=7028 #7027=(1,6,6,8,8,192)f32 #x_windows.61=(36,8,8,192)f32 Tensor.view Tensor.view_1299 2 1 x_windows.61 7030 x2.61 $input=x_windows.61 $shape=7030 #x_windows.61=(36,8,8,192)f32 #x2.61=(36,64,192)f32 aten::size pnnx_5966 2 1 x2.61 7035 7051 #x2.61=(36,64,192)f32 prim::NumToTensor pnnx_5967 1 1 7051 B_.61 aten::Int pnnx_5968 1 1 B_.61 7053 aten::Int pnnx_5969 1 1 B_.61 7054 aten::size pnnx_5970 2 1 x2.61 7036 7055 #x2.61=(36,64,192)f32 prim::NumToTensor pnnx_5971 1 1 7055 N.61 aten::Int pnnx_5972 1 1 N.61 7057 aten::Int pnnx_5973 1 1 N.61 7058 aten::Int pnnx_5974 1 1 N.61 7059 aten::Int pnnx_5975 1 1 N.61 7060 aten::Int pnnx_5976 1 1 N.61 7061 aten::Int pnnx_5977 1 1 N.61 7062 aten::size pnnx_5978 2 1 x2.61 7037 7063 #x2.61=(36,64,192)f32 prim::NumToTensor pnnx_5979 1 1 7063 C.129 aten::Int pnnx_5980 1 1 C.129 7065 nn.Linear layers_dfe.4.residual_group.blocks.5.attn.qkv 1 1 x2.61 7066 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.61=(36,64,192)f32 #7066=(36,64,576)f32 aten::div pnnx_5981 3 1 C.129 7034 7033 7067 aten::Int pnnx_5982 1 1 7067 7068 prim::ListConstruct pnnx_5983 5 1 7054 7062 7038 7039 7068 7069 prim::Constant pnnx_5985 0 1 21883 value=2 prim::Constant pnnx_5986 0 1 21884 value=0 prim::Constant pnnx_5987 0 1 21885 value=3 prim::Constant pnnx_5988 0 1 21886 value=1 prim::ListConstruct pnnx_5989 5 1 21883 21884 21885 21886 7040 7071 Tensor.reshape Tensor.reshape_492 2 1 7066 7069 7070 $input=7066 $shape=7069 #7066=(36,64,576)f32 #7070=(36,64,3,6,32)f32 prim::Constant pnnx_5991 0 1 21887 value=0 prim::Constant pnnx_5992 0 1 21888 value=0 prim::Constant pnnx_5994 0 1 21889 value=0 prim::Constant pnnx_5995 0 1 21890 value=1 prim::Constant pnnx_5997 0 1 21891 value=0 prim::Constant pnnx_5998 0 1 21892 value=2 torch.permute torch.permute_2645 2 1 7070 7071 qkv0.61 $input=7070 $dims=7071 #7070=(36,64,3,6,32)f32 #qkv0.61=(3,36,6,64,32)f32 Tensor.select Tensor.select_737 3 1 qkv0.61 21887 21888 q.61 $input=qkv0.61 $dim=21887 $index=21888 #qkv0.61=(3,36,6,64,32)f32 #q.61=(36,6,64,32)f32 aten::mul pnnx_6000 2 1 q.61 7032 q0.61 #q.61=(36,6,64,32)f32 #q0.61=(36,6,64,32)f32 Tensor.select Tensor.select_738 3 1 qkv0.61 21889 21890 k.61 $input=qkv0.61 $dim=21889 $index=21890 #qkv0.61=(3,36,6,64,32)f32 #k.61=(36,6,64,32)f32 prim::Constant pnnx_6003 0 1 21893 value=-1 prim::ListConstruct pnnx_6004 1 1 21893 7079 Tensor.view Tensor.view_1300 2 1 relative_position_index.61 7079 7080 $input=relative_position_index.61 $shape=7079 #relative_position_index.61=(64,64)i64 #7080=(4096)i64 prim::ListConstruct pnnx_6006 1 1 7080 7081 #7080=(4096)i64 prim::Constant pnnx_6008 0 1 21894 value=64 prim::Constant pnnx_6009 0 1 21895 value=-1 prim::ListConstruct pnnx_6010 3 1 7043 21894 21895 7083 Tensor.index Tensor.index_355 2 1 relative_position_bias_table.61 7081 7082 $input=relative_position_bias_table.61 $expr=7081 #relative_position_bias_table.61=(225,6)f32 #7082=(4096,6)f32 prim::Constant pnnx_6012 0 1 21896 value=2 prim::Constant pnnx_6013 0 1 21897 value=0 prim::Constant pnnx_6014 0 1 21898 value=1 prim::ListConstruct pnnx_6015 3 1 21896 21897 21898 
7085 Tensor.view Tensor.view_1301 2 1 7082 7083 relative_position_bias.61 $input=7082 $shape=7083 #7082=(4096,6)f32 #relative_position_bias.61=(64,64,6)f32 prim::Constant pnnx_6019 0 1 21900 value=0 torch.permute torch.permute_2646 2 1 relative_position_bias.61 7085 7086 $input=relative_position_bias.61 $dims=7085 #relative_position_bias.61=(64,64,6)f32 #7086=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_90 1 1 7086 relative_position_bias0.61 memory_format=torch.contiguous_format $input=7086 #7086=(6,64,64)f32 #relative_position_bias0.61=(6,64,64)f32 prim::Constant pnnx_6021 0 1 21901 value=1 torch.transpose torch.transpose_3027 3 1 k.61 7041 7042 7077 $input=k.61 $dim0=7041 $dim1=7042 #k.61=(36,6,64,32)f32 #7077=(36,6,32,64)f32 torch.matmul torch.matmul_2262 2 1 q0.61 7077 attn.123 $input=q0.61 $other=7077 #q0.61=(36,6,64,32)f32 #7077=(36,6,32,64)f32 #attn.123=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3275 2 1 relative_position_bias0.61 21900 7088 $input=relative_position_bias0.61 $dim=21900 #relative_position_bias0.61=(6,64,64)f32 #7088=(1,6,64,64)f32 aten::add pnnx_6022 3 1 attn.123 7088 21901 attn0.31 #attn.123=(36,6,64,64)f32 #7088=(1,6,64,64)f32 #attn0.31=(36,6,64,64)f32 prim::Constant pnnx_6023 0 1 21902 value=0 aten::size pnnx_6024 2 1 attn_mask.31 21902 7090 #attn_mask.31=(36,64,64)f32 prim::NumToTensor pnnx_6025 1 1 7090 other.31 aten::Int pnnx_6026 1 1 other.31 7092 prim::Constant pnnx_6027 0 1 21903 value=trunc aten::div pnnx_6028 3 1 B_.61 other.31 21903 7093 aten::Int pnnx_6029 1 1 7093 7094 prim::Constant pnnx_6030 0 1 21904 value=6 prim::ListConstruct pnnx_6031 5 1 7094 7092 21904 7061 7060 7095 prim::Constant pnnx_6033 0 1 21905 value=1 prim::Constant pnnx_6035 0 1 21906 value=0 prim::Constant pnnx_6037 0 1 21907 value=1 Tensor.view Tensor.view_1302 2 1 attn0.31 7095 7096 $input=attn0.31 $shape=7095 #attn0.31=(36,6,64,64)f32 #7096=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3276 2 1 attn_mask.31 21905 7097 $input=attn_mask.31 $dim=21905 #attn_mask.31=(36,64,64)f32 #7097=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3277 2 1 7097 21906 7098 $input=7097 $dim=21906 #7097=(36,1,64,64)f32 #7098=(1,36,1,64,64)f32 aten::add pnnx_6038 3 1 7096 7098 21907 attn1.31 #7096=(1,36,6,64,64)f32 #7098=(1,36,1,64,64)f32 #attn1.31=(1,36,6,64,64)f32 prim::Constant pnnx_6039 0 1 21908 value=-1 prim::Constant pnnx_6040 0 1 21909 value=6 prim::ListConstruct pnnx_6041 4 1 21908 21909 7059 7058 7100 Tensor.view Tensor.view_1303 2 1 attn1.31 7100 input.135 $input=attn1.31 $shape=7100 #attn1.31=(1,36,6,64,64)f32 #input.135=(36,6,64,64)f32 nn.Softmax layers_dfe.4.residual_group.blocks.5.attn.softmax 1 1 input.135 7102 dim=-1 #input.135=(36,6,64,64)f32 #7102=(36,6,64,64)f32 nn.Dropout layers_dfe.4.residual_group.blocks.5.attn.attn_drop 1 1 7102 7103 #7102=(36,6,64,64)f32 #7103=(36,6,64,64)f32 Tensor.select Tensor.select_739 3 1 qkv0.61 21891 21892 v.61 $input=qkv0.61 $dim=21891 $index=21892 #qkv0.61=(3,36,6,64,32)f32 #v.61=(36,6,64,32)f32 prim::Constant pnnx_6044 0 1 21910 value=1 prim::Constant pnnx_6045 0 1 21911 value=2 torch.matmul torch.matmul_2263 2 1 7103 v.61 7104 $input=7103 $other=v.61 #7103=(36,6,64,64)f32 #v.61=(36,6,64,32)f32 #7104=(36,6,64,32)f32 prim::ListConstruct pnnx_6047 3 1 7053 7057 7065 7106 torch.transpose torch.transpose_3028 3 1 7104 21910 21911 7105 $input=7104 $dim0=21910 $dim1=21911 #7104=(36,6,64,32)f32 #7105=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_493 2 1 7105 7106 input0.63 $input=7105 $shape=7106 #7105=(36,64,6,32)f32 #input0.63=(36,64,192)f32 
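The attention subgraph repeated in each block above (qkv: 192 -> 576, Tensor.reshape to (36,64,3,6,32), torch.permute to (3,36,6,64,32), the 1.767767e-01 scale, the relative_position_bias_table (225,6) gathered through relative_position_index (64,64), the optional attn_mask add for the shifted blocks, softmax, matmul with v, and the final 192 -> 192 proj) matches an ordinary windowed multi-head attention. A hedged sketch of what it computes, with names of our choosing and the attn_drop/proj_drop dropouts omitted; qkv, proj, bias_table, bias_index and attn_mask stand for the corresponding modules and attributes in the dump:

import torch
import torch.nn.functional as F

def window_attention(x, qkv, proj, bias_table, bias_index, attn_mask=None, num_heads=6):
    # x: (num_windows*B, N, C) = (36, 64, 192)
    B_, N, C = x.shape
    head_dim = C // num_heads                                  # 192 / 6 = 32
    q, k, v = qkv(x).reshape(B_, N, 3, num_heads, head_dim).permute(2, 0, 3, 1, 4)
    attn = (q * head_dim ** -0.5) @ k.transpose(-2, -1)        # 32**-0.5 = 1.767767e-01
    bias = bias_table[bias_index.view(-1)].view(N, N, -1)      # (64, 64, 6) from the (225, 6) table
    attn = attn + bias.permute(2, 0, 1).contiguous().unsqueeze(0)
    if attn_mask is not None:                                  # shifted blocks only; mask is (36, 64, 64)
        nW = attn_mask.shape[0]
        attn = attn.view(B_ // nW, nW, num_heads, N, N) + attn_mask.unsqueeze(1).unsqueeze(0)
        attn = attn.view(-1, num_heads, N, N)
    out = (F.softmax(attn, dim=-1) @ v).transpose(1, 2).reshape(B_, N, C)
    return proj(out)                                           # (36, 64, 192)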
nn.Linear layers_dfe.4.residual_group.blocks.5.attn.proj 1 1 input0.63 7108 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.63=(36,64,192)f32 #7108=(36,64,192)f32 nn.Dropout layers_dfe.4.residual_group.blocks.5.attn.proj_drop 1 1 7108 7109 #7108=(36,64,192)f32 #7109=(36,64,192)f32 prim::Constant pnnx_6049 0 1 21912 value=-1 prim::Constant pnnx_6050 0 1 21913 value=8 prim::Constant pnnx_6051 0 1 21914 value=8 prim::ListConstruct pnnx_6052 4 1 21912 21913 21914 6999 7110 prim::Constant pnnx_6054 0 1 21915 value=8 prim::Constant pnnx_6055 0 1 21916 value=trunc aten::div pnnx_6056 3 1 H.1 21915 21916 7112 aten::Int pnnx_6057 1 1 7112 7113 prim::Constant pnnx_6058 0 1 21917 value=8 prim::Constant pnnx_6059 0 1 21918 value=trunc aten::div pnnx_6060 3 1 W.1 21917 21918 7114 aten::Int pnnx_6061 1 1 7114 7115 prim::Constant pnnx_6062 0 1 21919 value=1 prim::Constant pnnx_6063 0 1 21920 value=8 prim::Constant pnnx_6064 0 1 21921 value=8 prim::Constant pnnx_6065 0 1 21922 value=-1 prim::ListConstruct pnnx_6066 6 1 21919 7113 7115 21920 21921 21922 7116 prim::Constant pnnx_6068 0 1 21923 value=0 prim::Constant pnnx_6069 0 1 21924 value=1 prim::Constant pnnx_6070 0 1 21925 value=3 prim::Constant pnnx_6071 0 1 21926 value=2 prim::Constant pnnx_6072 0 1 21927 value=4 prim::Constant pnnx_6073 0 1 21928 value=5 prim::ListConstruct pnnx_6074 6 1 21923 21924 21925 21926 21927 21928 7118 Tensor.view Tensor.view_1304 2 1 7109 7110 windows.61 $input=7109 $shape=7110 #7109=(36,64,192)f32 #windows.61=(36,8,8,192)f32 Tensor.view Tensor.view_1305 2 1 windows.61 7116 x3.61 $input=windows.61 $shape=7116 #windows.61=(36,8,8,192)f32 #x3.61=(1,6,6,8,8,192)f32 prim::Constant pnnx_6078 0 1 21930 value=1 prim::Constant pnnx_6079 0 1 21931 value=-1 prim::ListConstruct pnnx_6080 4 1 21930 208 448 21931 7121 torch.permute torch.permute_2647 2 1 x3.61 7118 7119 $input=x3.61 $dims=7118 #x3.61=(1,6,6,8,8,192)f32 #7119=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_91 1 1 7119 7120 memory_format=torch.contiguous_format $input=7119 #7119=(1,6,8,6,8,192)f32 #7120=(1,6,8,6,8,192)f32 prim::Constant pnnx_6082 0 1 21932 value=4 prim::Constant pnnx_6083 0 1 21933 value=4 prim::ListConstruct pnnx_6084 2 1 21932 21933 7123 prim::Constant pnnx_6085 0 1 21934 value=1 prim::Constant pnnx_6086 0 1 21935 value=2 prim::ListConstruct pnnx_6087 2 1 21934 21935 7124 Tensor.view Tensor.view_1306 2 1 7120 7121 shifted_x.31 $input=7120 $shape=7121 #7120=(1,6,8,6,8,192)f32 #shifted_x.31=(1,48,48,192)f32 aten::mul pnnx_6089 2 1 H.1 W.1 7126 aten::Int pnnx_6090 1 1 7126 7127 prim::ListConstruct pnnx_6091 3 1 6994 7127 6998 7128 prim::Constant pnnx_6093 0 1 7130 value=None prim::Constant pnnx_6094 0 1 21936 value=1 torch.roll torch.roll_2449 3 1 shifted_x.31 7123 7124 x4.61 $input=shifted_x.31 $shifts=7123 $dims=7124 #shifted_x.31=(1,48,48,192)f32 #x4.61=(1,48,48,192)f32 Tensor.view Tensor.view_1307 2 1 x4.61 7128 x5.31 $input=x4.61 $shape=7128 #x4.61=(1,48,48,192)f32 #x5.31=(1,2304,192)f32 aten::add pnnx_6095 3 1 6973 x5.31 21936 input.137 #6973=(1,2304,192)f32 #x5.31=(1,2304,192)f32 #input.137=(1,2304,192)f32 nn.LayerNorm layers_dfe.4.residual_group.blocks.5.norm2 1 1 input.137 7132 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.137=(1,2304,192)f32 #7132=(1,2304,192)f32 nn.Linear layers_dfe.4.residual_group.blocks.5.mlp.fc1 1 1 7132 7137 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #7132=(1,2304,192)f32 
#7137=(1,2304,384)f32 nn.GELU layers_dfe.4.residual_group.blocks.5.mlp.act 1 1 7137 7138 #7137=(1,2304,384)f32 #7138=(1,2304,384)f32 nn.Dropout layers_dfe.4.residual_group.blocks.5.mlp.drop 1 1 7138 7139 #7138=(1,2304,384)f32 #7139=(1,2304,384)f32 nn.Linear layers_dfe.4.residual_group.blocks.5.mlp.fc2 1 1 7139 7140 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #7139=(1,2304,384)f32 #7140=(1,2304,192)f32 nn.Dropout layers_dfe.4.residual_group.blocks.5.mlp.drop 1 1 7140 7141 #7140=(1,2304,192)f32 #7141=(1,2304,192)f32 prim::Constant pnnx_6096 0 1 7142 value=None prim::Constant pnnx_6097 0 1 21937 value=1 aten::add pnnx_6098 3 1 input.137 7141 21937 7143 #input.137=(1,2304,192)f32 #7141=(1,2304,192)f32 #7143=(1,2304,192)f32 prim::Constant pnnx_6099 0 1 7144 value=0 prim::Constant pnnx_6100 0 1 7145 value=1 prim::Constant pnnx_6101 0 1 7146 value=2 prim::Constant pnnx_6102 0 1 7147 value=192 aten::size pnnx_6103 2 1 7143 7144 7148 #7143=(1,2304,192)f32 prim::NumToTensor pnnx_6104 1 1 7148 B.73 aten::Int pnnx_6105 1 1 B.73 7150 prim::ListConstruct pnnx_6107 4 1 7150 7147 205 445 7152 torch.transpose torch.transpose_3029 3 1 7143 7145 7146 7151 $input=7143 $dim0=7145 $dim1=7146 #7143=(1,2304,192)f32 #7151=(1,192,2304)f32 Tensor.view Tensor.view_1308 2 1 7151 7152 input.139 $input=7151 $shape=7152 #7151=(1,192,2304)f32 #input.139=(1,192,48,48)f32 nn.Conv2d layers_dfe.4.conv 1 1 input.139 7154 bias=True dilation=(1,1) groups=1 in_channels=192 kernel_size=(3,3) out_channels=192 padding=(1,1) padding_mode=zeros stride=(1,1) @bias=(192)f32 @weight=(192,192,3,3)f32 #input.139=(1,192,48,48)f32 #7154=(1,192,48,48)f32 prim::Constant pnnx_6109 0 1 7155 value=-1 prim::Constant pnnx_6110 0 1 7156 value=2 prim::Constant pnnx_6111 0 1 7157 value=1 prim::Constant pnnx_6113 0 1 21938 value=2 torch.flatten torch.flatten_2188 3 1 7154 7156 7155 7158 $input=7154 $start_dim=7156 $end_dim=7155 #7154=(1,192,48,48)f32 #7158=(1,192,2304)f32 torch.transpose torch.transpose_3030 3 1 7158 7157 21938 7159 $input=7158 $dim0=7157 $dim1=21938 #7158=(1,192,2304)f32 #7159=(1,2304,192)f32 aten::add pnnx_6115 3 1 7159 6178 6179 7160 #7159=(1,2304,192)f32 #6178=(1,2304,192)f32 #7160=(1,2304,192)f32 prim::Constant pnnx_6116 0 1 7161 value=1 prim::Constant pnnx_6117 0 1 7178 value=trunc prim::Constant pnnx_6118 0 1 7179 value=8 prim::Constant pnnx_6119 0 1 7180 value=0 prim::Constant pnnx_6120 0 1 7181 value=2 prim::Constant pnnx_6121 0 1 7182 value=1 prim::Constant pnnx_6122 0 1 7183 value=3 prim::Constant pnnx_6123 0 1 7184 value=8 prim::Constant pnnx_6124 0 1 7185 value=4 prim::Constant pnnx_6125 0 1 7186 value=5 prim::Constant pnnx_6126 0 1 7187 value=-1 prim::Constant pnnx_6127 0 1 7188 value=64 aten::size pnnx_6128 2 1 7160 7180 7194 #7160=(1,2304,192)f32 prim::NumToTensor pnnx_6129 1 1 7194 B.77 aten::Int pnnx_6130 1 1 B.77 7196 aten::Int pnnx_6131 1 1 B.77 7197 aten::size pnnx_6132 2 1 7160 7181 7198 #7160=(1,2304,192)f32 prim::NumToTensor pnnx_6133 1 1 7198 C.131 aten::Int pnnx_6134 1 1 C.131 7200 aten::Int pnnx_6135 1 1 C.131 7201 aten::Int pnnx_6136 1 1 C.131 7202 aten::Int pnnx_6137 1 1 C.131 7203 nn.LayerNorm layers_dfe.5.residual_group.blocks.0.norm1 1 1 7160 7204 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #7160=(1,2304,192)f32 #7204=(1,2304,192)f32 prim::ListConstruct pnnx_6138 4 1 7197 202 442 7203 7205 prim::Constant pnnx_6140 0 1 21939 value=0 Tensor.view Tensor.view_1309 2 1 7204 7205 x.63 $input=7204 $shape=7205 
#7204=(1,2304,192)f32 #x.63=(1,48,48,192)f32 aten::size pnnx_6141 2 1 x.63 21939 7207 #x.63=(1,48,48,192)f32 prim::NumToTensor pnnx_6142 1 1 7207 B0.63 aten::Int pnnx_6143 1 1 B0.63 7209 aten::size pnnx_6144 2 1 x.63 7182 7210 #x.63=(1,48,48,192)f32 prim::NumToTensor pnnx_6145 1 1 7210 7211 prim::Constant pnnx_6146 0 1 21940 value=2 aten::size pnnx_6147 2 1 x.63 21940 7212 #x.63=(1,48,48,192)f32 prim::NumToTensor pnnx_6148 1 1 7212 7213 aten::size pnnx_6149 2 1 x.63 7183 7214 #x.63=(1,48,48,192)f32 prim::NumToTensor pnnx_6150 1 1 7214 C0.63 aten::Int pnnx_6151 1 1 C0.63 7216 aten::Int pnnx_6152 1 1 C0.63 7217 aten::div pnnx_6153 3 1 7211 7179 7178 7218 aten::Int pnnx_6154 1 1 7218 7219 prim::Constant pnnx_6155 0 1 21941 value=8 prim::Constant pnnx_6156 0 1 21942 value=trunc aten::div pnnx_6157 3 1 7213 21941 21942 7220 aten::Int pnnx_6158 1 1 7220 7221 prim::Constant pnnx_6159 0 1 21943 value=8 prim::ListConstruct pnnx_6160 6 1 7209 7219 7184 7221 21943 7217 7222 prim::Constant pnnx_6162 0 1 21944 value=0 prim::Constant pnnx_6163 0 1 21945 value=1 prim::Constant pnnx_6164 0 1 21946 value=3 prim::Constant pnnx_6165 0 1 21947 value=2 prim::ListConstruct pnnx_6166 6 1 21944 21945 21946 21947 7185 7186 7224 Tensor.view Tensor.view_1310 2 1 x.63 7222 x0.63 $input=x.63 $shape=7222 #x.63=(1,48,48,192)f32 #x0.63=(1,6,8,6,8,192)f32 prim::Constant pnnx_6170 0 1 21949 value=8 prim::Constant pnnx_6171 0 1 21950 value=8 prim::ListConstruct pnnx_6172 4 1 7187 21949 21950 7216 7227 torch.permute torch.permute_2648 2 1 x0.63 7224 7225 $input=x0.63 $dims=7224 #x0.63=(1,6,8,6,8,192)f32 #7225=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_92 1 1 7225 7226 memory_format=torch.contiguous_format $input=7225 #7225=(1,6,6,8,8,192)f32 #7226=(1,6,6,8,8,192)f32 prim::Constant pnnx_6174 0 1 21951 value=-1 prim::ListConstruct pnnx_6175 3 1 21951 7188 7202 7229 prim::Constant pnnx_6177 0 1 7231 value=1.767767e-01 prim::Constant pnnx_6178 0 1 7232 value=trunc prim::Constant pnnx_6179 0 1 7233 value=6 prim::Constant pnnx_6180 0 1 7234 value=0 prim::Constant pnnx_6181 0 1 7235 value=1 prim::Constant pnnx_6182 0 1 7236 value=2 prim::Constant pnnx_6183 0 1 7237 value=3 prim::Constant pnnx_6184 0 1 7238 value=6 prim::Constant pnnx_6185 0 1 7239 value=4 prim::Constant pnnx_6186 0 1 7240 value=-2 prim::Constant pnnx_6187 0 1 7241 value=-1 prim::Constant pnnx_6188 0 1 7242 value=64 pnnx.Attribute layers_dfe.5.residual_group.blocks.0.attn 0 1 relative_position_bias_table.63 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.63=(225,6)f32 pnnx.Attribute layers_dfe.5.residual_group.blocks.0.attn 0 1 relative_position_index.63 @relative_position_index=(64,64)i64 #relative_position_index.63=(64,64)i64 Tensor.view Tensor.view_1311 2 1 7226 7227 x_windows.63 $input=7226 $shape=7227 #7226=(1,6,6,8,8,192)f32 #x_windows.63=(36,8,8,192)f32 Tensor.view Tensor.view_1312 2 1 x_windows.63 7229 x1.63 $input=x_windows.63 $shape=7229 #x_windows.63=(36,8,8,192)f32 #x1.63=(36,64,192)f32 aten::size pnnx_6189 2 1 x1.63 7234 7250 #x1.63=(36,64,192)f32 prim::NumToTensor pnnx_6190 1 1 7250 B_.63 aten::Int pnnx_6191 1 1 B_.63 7252 aten::Int pnnx_6192 1 1 B_.63 7253 aten::size pnnx_6193 2 1 x1.63 7235 7254 #x1.63=(36,64,192)f32 prim::NumToTensor pnnx_6194 1 1 7254 N.63 aten::Int pnnx_6195 1 1 N.63 7256 aten::Int pnnx_6196 1 1 N.63 7257 aten::size pnnx_6197 2 1 x1.63 7236 7258 #x1.63=(36,64,192)f32 prim::NumToTensor pnnx_6198 1 1 7258 C.133 aten::Int pnnx_6199 1 1 C.133 7260 nn.Linear 
layers_dfe.5.residual_group.blocks.0.attn.qkv 1 1 x1.63 7261 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.63=(36,64,192)f32 #7261=(36,64,576)f32 aten::div pnnx_6200 3 1 C.133 7233 7232 7262 aten::Int pnnx_6201 1 1 7262 7263 prim::ListConstruct pnnx_6202 5 1 7253 7257 7237 7238 7263 7264 prim::Constant pnnx_6204 0 1 21952 value=2 prim::Constant pnnx_6205 0 1 21953 value=0 prim::Constant pnnx_6206 0 1 21954 value=3 prim::Constant pnnx_6207 0 1 21955 value=1 prim::ListConstruct pnnx_6208 5 1 21952 21953 21954 21955 7239 7266 Tensor.reshape Tensor.reshape_494 2 1 7261 7264 7265 $input=7261 $shape=7264 #7261=(36,64,576)f32 #7265=(36,64,3,6,32)f32 prim::Constant pnnx_6210 0 1 21956 value=0 prim::Constant pnnx_6211 0 1 21957 value=0 prim::Constant pnnx_6213 0 1 21958 value=0 prim::Constant pnnx_6214 0 1 21959 value=1 prim::Constant pnnx_6216 0 1 21960 value=0 prim::Constant pnnx_6217 0 1 21961 value=2 torch.permute torch.permute_2649 2 1 7265 7266 qkv0.63 $input=7265 $dims=7266 #7265=(36,64,3,6,32)f32 #qkv0.63=(3,36,6,64,32)f32 Tensor.select Tensor.select_740 3 1 qkv0.63 21956 21957 q.63 $input=qkv0.63 $dim=21956 $index=21957 #qkv0.63=(3,36,6,64,32)f32 #q.63=(36,6,64,32)f32 aten::mul pnnx_6219 2 1 q.63 7231 q0.63 #q.63=(36,6,64,32)f32 #q0.63=(36,6,64,32)f32 Tensor.select Tensor.select_741 3 1 qkv0.63 21958 21959 k.63 $input=qkv0.63 $dim=21958 $index=21959 #qkv0.63=(3,36,6,64,32)f32 #k.63=(36,6,64,32)f32 prim::Constant pnnx_6222 0 1 21962 value=-1 prim::ListConstruct pnnx_6223 1 1 21962 7274 Tensor.view Tensor.view_1313 2 1 relative_position_index.63 7274 7275 $input=relative_position_index.63 $shape=7274 #relative_position_index.63=(64,64)i64 #7275=(4096)i64 prim::ListConstruct pnnx_6225 1 1 7275 7276 #7275=(4096)i64 prim::Constant pnnx_6227 0 1 21963 value=64 prim::Constant pnnx_6228 0 1 21964 value=-1 prim::ListConstruct pnnx_6229 3 1 7242 21963 21964 7278 Tensor.index Tensor.index_356 2 1 relative_position_bias_table.63 7276 7277 $input=relative_position_bias_table.63 $expr=7276 #relative_position_bias_table.63=(225,6)f32 #7277=(4096,6)f32 prim::Constant pnnx_6231 0 1 21965 value=2 prim::Constant pnnx_6232 0 1 21966 value=0 prim::Constant pnnx_6233 0 1 21967 value=1 prim::ListConstruct pnnx_6234 3 1 21965 21966 21967 7280 Tensor.view Tensor.view_1314 2 1 7277 7278 relative_position_bias.63 $input=7277 $shape=7278 #7277=(4096,6)f32 #relative_position_bias.63=(64,64,6)f32 prim::Constant pnnx_6238 0 1 21969 value=0 torch.permute torch.permute_2650 2 1 relative_position_bias.63 7280 7281 $input=relative_position_bias.63 $dims=7280 #relative_position_bias.63=(64,64,6)f32 #7281=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_93 1 1 7281 relative_position_bias0.63 memory_format=torch.contiguous_format $input=7281 #7281=(6,64,64)f32 #relative_position_bias0.63=(6,64,64)f32 prim::Constant pnnx_6240 0 1 21970 value=1 torch.transpose torch.transpose_3031 3 1 k.63 7240 7241 7272 $input=k.63 $dim0=7240 $dim1=7241 #k.63=(36,6,64,32)f32 #7272=(36,6,32,64)f32 torch.matmul torch.matmul_2264 2 1 q0.63 7272 attn.127 $input=q0.63 $other=7272 #q0.63=(36,6,64,32)f32 #7272=(36,6,32,64)f32 #attn.127=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3278 2 1 relative_position_bias0.63 21969 7283 $input=relative_position_bias0.63 $dim=21969 #relative_position_bias0.63=(6,64,64)f32 #7283=(1,6,64,64)f32 aten::add pnnx_6241 3 1 attn.127 7283 21970 input.145 #attn.127=(36,6,64,64)f32 #7283=(1,6,64,64)f32 #input.145=(36,6,64,64)f32 nn.Softmax 
layers_dfe.5.residual_group.blocks.0.attn.softmax 1 1 input.145 7285 dim=-1 #input.145=(36,6,64,64)f32 #7285=(36,6,64,64)f32 nn.Dropout layers_dfe.5.residual_group.blocks.0.attn.attn_drop 1 1 7285 7286 #7285=(36,6,64,64)f32 #7286=(36,6,64,64)f32 Tensor.select Tensor.select_742 3 1 qkv0.63 21960 21961 v.63 $input=qkv0.63 $dim=21960 $index=21961 #qkv0.63=(3,36,6,64,32)f32 #v.63=(36,6,64,32)f32 prim::Constant pnnx_6243 0 1 21971 value=1 prim::Constant pnnx_6244 0 1 21972 value=2 torch.matmul torch.matmul_2265 2 1 7286 v.63 7287 $input=7286 $other=v.63 #7286=(36,6,64,64)f32 #v.63=(36,6,64,32)f32 #7287=(36,6,64,32)f32 prim::ListConstruct pnnx_6246 3 1 7252 7256 7260 7289 torch.transpose torch.transpose_3032 3 1 7287 21971 21972 7288 $input=7287 $dim0=21971 $dim1=21972 #7287=(36,6,64,32)f32 #7288=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_495 2 1 7288 7289 input0.67 $input=7288 $shape=7289 #7288=(36,64,6,32)f32 #input0.67=(36,64,192)f32 nn.Linear layers_dfe.5.residual_group.blocks.0.attn.proj 1 1 input0.67 7291 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.67=(36,64,192)f32 #7291=(36,64,192)f32 nn.Dropout layers_dfe.5.residual_group.blocks.0.attn.proj_drop 1 1 7291 7292 #7291=(36,64,192)f32 #7292=(36,64,192)f32 prim::Constant pnnx_6248 0 1 21973 value=-1 prim::Constant pnnx_6249 0 1 21974 value=8 prim::Constant pnnx_6250 0 1 21975 value=8 prim::ListConstruct pnnx_6251 4 1 21973 21974 21975 7201 7293 prim::Constant pnnx_6253 0 1 21976 value=8 prim::Constant pnnx_6254 0 1 21977 value=trunc aten::div pnnx_6255 3 1 H.1 21976 21977 7295 aten::Int pnnx_6256 1 1 7295 7296 prim::Constant pnnx_6257 0 1 21978 value=8 prim::Constant pnnx_6258 0 1 21979 value=trunc aten::div pnnx_6259 3 1 W.1 21978 21979 7297 aten::Int pnnx_6260 1 1 7297 7298 prim::Constant pnnx_6261 0 1 21980 value=1 prim::Constant pnnx_6262 0 1 21981 value=8 prim::Constant pnnx_6263 0 1 21982 value=8 prim::Constant pnnx_6264 0 1 21983 value=-1 prim::ListConstruct pnnx_6265 6 1 21980 7296 7298 21981 21982 21983 7299 prim::Constant pnnx_6267 0 1 21984 value=0 prim::Constant pnnx_6268 0 1 21985 value=1 prim::Constant pnnx_6269 0 1 21986 value=3 prim::Constant pnnx_6270 0 1 21987 value=2 prim::Constant pnnx_6271 0 1 21988 value=4 prim::Constant pnnx_6272 0 1 21989 value=5 prim::ListConstruct pnnx_6273 6 1 21984 21985 21986 21987 21988 21989 7301 Tensor.view Tensor.view_1315 2 1 7292 7293 windows.63 $input=7292 $shape=7293 #7292=(36,64,192)f32 #windows.63=(36,8,8,192)f32 Tensor.view Tensor.view_1316 2 1 windows.63 7299 x2.63 $input=windows.63 $shape=7299 #windows.63=(36,8,8,192)f32 #x2.63=(1,6,6,8,8,192)f32 prim::Constant pnnx_6277 0 1 21991 value=1 prim::Constant pnnx_6278 0 1 21992 value=-1 prim::ListConstruct pnnx_6279 4 1 21991 199 439 21992 7304 torch.permute torch.permute_2651 2 1 x2.63 7301 7302 $input=x2.63 $dims=7301 #x2.63=(1,6,6,8,8,192)f32 #7302=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_94 1 1 7302 7303 memory_format=torch.contiguous_format $input=7302 #7302=(1,6,8,6,8,192)f32 #7303=(1,6,8,6,8,192)f32 aten::mul pnnx_6281 2 1 H.1 W.1 7306 aten::Int pnnx_6282 1 1 7306 7307 prim::ListConstruct pnnx_6283 3 1 7196 7307 7200 7308 prim::Constant pnnx_6285 0 1 7310 value=None prim::Constant pnnx_6286 0 1 21993 value=1 Tensor.view Tensor.view_1317 2 1 7303 7304 x3.63 $input=7303 $shape=7304 #7303=(1,6,8,6,8,192)f32 #x3.63=(1,48,48,192)f32 Tensor.view Tensor.view_1318 2 1 x3.63 7308 x4.63 $input=x3.63 $shape=7308 #x3.63=(1,48,48,192)f32 #x4.63=(1,2304,192)f32 aten::add pnnx_6287 3 1 
7160 x4.63 21993 input.147 #7160=(1,2304,192)f32 #x4.63=(1,2304,192)f32 #input.147=(1,2304,192)f32 nn.LayerNorm layers_dfe.5.residual_group.blocks.0.norm2 1 1 input.147 7312 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.147=(1,2304,192)f32 #7312=(1,2304,192)f32 nn.Linear layers_dfe.5.residual_group.blocks.0.mlp.fc1 1 1 7312 7317 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #7312=(1,2304,192)f32 #7317=(1,2304,384)f32 nn.GELU layers_dfe.5.residual_group.blocks.0.mlp.act 1 1 7317 7318 #7317=(1,2304,384)f32 #7318=(1,2304,384)f32 nn.Dropout layers_dfe.5.residual_group.blocks.0.mlp.drop 1 1 7318 7319 #7318=(1,2304,384)f32 #7319=(1,2304,384)f32 nn.Linear layers_dfe.5.residual_group.blocks.0.mlp.fc2 1 1 7319 7320 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #7319=(1,2304,384)f32 #7320=(1,2304,192)f32 nn.Dropout layers_dfe.5.residual_group.blocks.0.mlp.drop 1 1 7320 7321 #7320=(1,2304,192)f32 #7321=(1,2304,192)f32 prim::Constant pnnx_6288 0 1 7322 value=None prim::Constant pnnx_6289 0 1 21994 value=1 aten::add pnnx_6290 3 1 input.147 7321 21994 7323 #input.147=(1,2304,192)f32 #7321=(1,2304,192)f32 #7323=(1,2304,192)f32 prim::Constant pnnx_6291 0 1 7324 value=trunc prim::Constant pnnx_6292 0 1 7325 value=8 prim::Constant pnnx_6293 0 1 7326 value=0 prim::Constant pnnx_6294 0 1 7327 value=2 prim::Constant pnnx_6295 0 1 7328 value=-4 prim::Constant pnnx_6296 0 1 7329 value=1 prim::Constant pnnx_6297 0 1 7330 value=3 prim::Constant pnnx_6298 0 1 7331 value=8 prim::Constant pnnx_6299 0 1 7332 value=4 prim::Constant pnnx_6300 0 1 7333 value=5 prim::Constant pnnx_6301 0 1 7334 value=-1 prim::Constant pnnx_6302 0 1 7335 value=64 pnnx.Attribute layers_dfe.5.residual_group.blocks.1 0 1 attn_mask.33 @attn_mask=(36,64,64)f32 #attn_mask.33=(36,64,64)f32 aten::size pnnx_6303 2 1 7323 7326 7342 #7323=(1,2304,192)f32 prim::NumToTensor pnnx_6304 1 1 7342 B.79 aten::Int pnnx_6305 1 1 B.79 7344 aten::Int pnnx_6306 1 1 B.79 7345 aten::size pnnx_6307 2 1 7323 7327 7346 #7323=(1,2304,192)f32 prim::NumToTensor pnnx_6308 1 1 7346 C.135 aten::Int pnnx_6309 1 1 C.135 7348 aten::Int pnnx_6310 1 1 C.135 7349 aten::Int pnnx_6311 1 1 C.135 7350 aten::Int pnnx_6312 1 1 C.135 7351 nn.LayerNorm layers_dfe.5.residual_group.blocks.1.norm1 1 1 7323 7352 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #7323=(1,2304,192)f32 #7352=(1,2304,192)f32 prim::ListConstruct pnnx_6313 4 1 7345 196 436 7351 7353 prim::Constant pnnx_6315 0 1 21995 value=-4 prim::ListConstruct pnnx_6316 2 1 7328 21995 7355 prim::Constant pnnx_6317 0 1 21996 value=2 prim::ListConstruct pnnx_6318 2 1 7329 21996 7356 Tensor.view Tensor.view_1319 2 1 7352 7353 x.65 $input=7352 $shape=7353 #7352=(1,2304,192)f32 #x.65=(1,48,48,192)f32 prim::Constant pnnx_6320 0 1 21997 value=0 torch.roll torch.roll_2450 3 1 x.65 7355 7356 x0.65 $input=x.65 $shifts=7355 $dims=7356 #x.65=(1,48,48,192)f32 #x0.65=(1,48,48,192)f32 aten::size pnnx_6321 2 1 x0.65 21997 7358 #x0.65=(1,48,48,192)f32 prim::NumToTensor pnnx_6322 1 1 7358 B0.65 aten::Int pnnx_6323 1 1 B0.65 7360 prim::Constant pnnx_6324 0 1 21998 value=1 aten::size pnnx_6325 2 1 x0.65 21998 7361 #x0.65=(1,48,48,192)f32 prim::NumToTensor pnnx_6326 1 1 7361 7362 prim::Constant pnnx_6327 0 1 21999 value=2 aten::size pnnx_6328 2 1 x0.65 21999 7363 #x0.65=(1,48,48,192)f32 prim::NumToTensor pnnx_6329 1 1 7363 7364 aten::size pnnx_6330 2 1 x0.65 7330 7365 
#x0.65=(1,48,48,192)f32 prim::NumToTensor pnnx_6331 1 1 7365 C0.65 aten::Int pnnx_6332 1 1 C0.65 7367 aten::Int pnnx_6333 1 1 C0.65 7368 aten::div pnnx_6334 3 1 7362 7325 7324 7369 aten::Int pnnx_6335 1 1 7369 7370 prim::Constant pnnx_6336 0 1 22000 value=8 prim::Constant pnnx_6337 0 1 22001 value=trunc aten::div pnnx_6338 3 1 7364 22000 22001 7371 aten::Int pnnx_6339 1 1 7371 7372 prim::Constant pnnx_6340 0 1 22002 value=8 prim::ListConstruct pnnx_6341 6 1 7360 7370 7331 7372 22002 7368 7373 prim::Constant pnnx_6343 0 1 22003 value=0 prim::Constant pnnx_6344 0 1 22004 value=1 prim::Constant pnnx_6345 0 1 22005 value=3 prim::Constant pnnx_6346 0 1 22006 value=2 prim::ListConstruct pnnx_6347 6 1 22003 22004 22005 22006 7332 7333 7375 Tensor.view Tensor.view_1320 2 1 x0.65 7373 x1.65 $input=x0.65 $shape=7373 #x0.65=(1,48,48,192)f32 #x1.65=(1,6,8,6,8,192)f32 prim::Constant pnnx_6351 0 1 22008 value=8 prim::Constant pnnx_6352 0 1 22009 value=8 prim::ListConstruct pnnx_6353 4 1 7334 22008 22009 7367 7378 torch.permute torch.permute_2652 2 1 x1.65 7375 7376 $input=x1.65 $dims=7375 #x1.65=(1,6,8,6,8,192)f32 #7376=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_95 1 1 7376 7377 memory_format=torch.contiguous_format $input=7376 #7376=(1,6,6,8,8,192)f32 #7377=(1,6,6,8,8,192)f32 prim::Constant pnnx_6355 0 1 22010 value=-1 prim::ListConstruct pnnx_6356 3 1 22010 7335 7350 7380 prim::Constant pnnx_6358 0 1 7382 value=1.767767e-01 prim::Constant pnnx_6359 0 1 7383 value=trunc prim::Constant pnnx_6360 0 1 7384 value=6 prim::Constant pnnx_6361 0 1 7385 value=0 prim::Constant pnnx_6362 0 1 7386 value=1 prim::Constant pnnx_6363 0 1 7387 value=2 prim::Constant pnnx_6364 0 1 7388 value=3 prim::Constant pnnx_6365 0 1 7389 value=6 prim::Constant pnnx_6366 0 1 7390 value=4 prim::Constant pnnx_6367 0 1 7391 value=-2 prim::Constant pnnx_6368 0 1 7392 value=-1 prim::Constant pnnx_6369 0 1 7393 value=64 pnnx.Attribute layers_dfe.5.residual_group.blocks.1.attn 0 1 relative_position_bias_table.65 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.65=(225,6)f32 pnnx.Attribute layers_dfe.5.residual_group.blocks.1.attn 0 1 relative_position_index.65 @relative_position_index=(64,64)i64 #relative_position_index.65=(64,64)i64 Tensor.view Tensor.view_1321 2 1 7377 7378 x_windows.65 $input=7377 $shape=7378 #7377=(1,6,6,8,8,192)f32 #x_windows.65=(36,8,8,192)f32 Tensor.view Tensor.view_1322 2 1 x_windows.65 7380 x2.65 $input=x_windows.65 $shape=7380 #x_windows.65=(36,8,8,192)f32 #x2.65=(36,64,192)f32 aten::size pnnx_6370 2 1 x2.65 7385 7401 #x2.65=(36,64,192)f32 prim::NumToTensor pnnx_6371 1 1 7401 B_.65 aten::Int pnnx_6372 1 1 B_.65 7403 aten::Int pnnx_6373 1 1 B_.65 7404 aten::size pnnx_6374 2 1 x2.65 7386 7405 #x2.65=(36,64,192)f32 prim::NumToTensor pnnx_6375 1 1 7405 N.65 aten::Int pnnx_6376 1 1 N.65 7407 aten::Int pnnx_6377 1 1 N.65 7408 aten::Int pnnx_6378 1 1 N.65 7409 aten::Int pnnx_6379 1 1 N.65 7410 aten::Int pnnx_6380 1 1 N.65 7411 aten::Int pnnx_6381 1 1 N.65 7412 aten::size pnnx_6382 2 1 x2.65 7387 7413 #x2.65=(36,64,192)f32 prim::NumToTensor pnnx_6383 1 1 7413 C.137 aten::Int pnnx_6384 1 1 C.137 7415 nn.Linear layers_dfe.5.residual_group.blocks.1.attn.qkv 1 1 x2.65 7416 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.65=(36,64,192)f32 #7416=(36,64,576)f32 aten::div pnnx_6385 3 1 C.137 7384 7383 7417 aten::Int pnnx_6386 1 1 7417 7418 prim::ListConstruct pnnx_6387 5 1 7404 7412 7388 7389 7418 7419 prim::Constant pnnx_6389 0 1 22011 value=2 prim::Constant 
pnnx_6390 0 1 22012 value=0 prim::Constant pnnx_6391 0 1 22013 value=3 prim::Constant pnnx_6392 0 1 22014 value=1 prim::ListConstruct pnnx_6393 5 1 22011 22012 22013 22014 7390 7421 Tensor.reshape Tensor.reshape_496 2 1 7416 7419 7420 $input=7416 $shape=7419 #7416=(36,64,576)f32 #7420=(36,64,3,6,32)f32 prim::Constant pnnx_6395 0 1 22015 value=0 prim::Constant pnnx_6396 0 1 22016 value=0 prim::Constant pnnx_6398 0 1 22017 value=0 prim::Constant pnnx_6399 0 1 22018 value=1 prim::Constant pnnx_6401 0 1 22019 value=0 prim::Constant pnnx_6402 0 1 22020 value=2 torch.permute torch.permute_2653 2 1 7420 7421 qkv0.65 $input=7420 $dims=7421 #7420=(36,64,3,6,32)f32 #qkv0.65=(3,36,6,64,32)f32 Tensor.select Tensor.select_743 3 1 qkv0.65 22015 22016 q.65 $input=qkv0.65 $dim=22015 $index=22016 #qkv0.65=(3,36,6,64,32)f32 #q.65=(36,6,64,32)f32 aten::mul pnnx_6404 2 1 q.65 7382 q0.65 #q.65=(36,6,64,32)f32 #q0.65=(36,6,64,32)f32 Tensor.select Tensor.select_744 3 1 qkv0.65 22017 22018 k.65 $input=qkv0.65 $dim=22017 $index=22018 #qkv0.65=(3,36,6,64,32)f32 #k.65=(36,6,64,32)f32 prim::Constant pnnx_6407 0 1 22021 value=-1 prim::ListConstruct pnnx_6408 1 1 22021 7429 Tensor.view Tensor.view_1323 2 1 relative_position_index.65 7429 7430 $input=relative_position_index.65 $shape=7429 #relative_position_index.65=(64,64)i64 #7430=(4096)i64 prim::ListConstruct pnnx_6410 1 1 7430 7431 #7430=(4096)i64 prim::Constant pnnx_6412 0 1 22022 value=64 prim::Constant pnnx_6413 0 1 22023 value=-1 prim::ListConstruct pnnx_6414 3 1 7393 22022 22023 7433 Tensor.index Tensor.index_357 2 1 relative_position_bias_table.65 7431 7432 $input=relative_position_bias_table.65 $expr=7431 #relative_position_bias_table.65=(225,6)f32 #7432=(4096,6)f32 prim::Constant pnnx_6416 0 1 22024 value=2 prim::Constant pnnx_6417 0 1 22025 value=0 prim::Constant pnnx_6418 0 1 22026 value=1 prim::ListConstruct pnnx_6419 3 1 22024 22025 22026 7435 Tensor.view Tensor.view_1324 2 1 7432 7433 relative_position_bias.65 $input=7432 $shape=7433 #7432=(4096,6)f32 #relative_position_bias.65=(64,64,6)f32 prim::Constant pnnx_6423 0 1 22028 value=0 torch.permute torch.permute_2654 2 1 relative_position_bias.65 7435 7436 $input=relative_position_bias.65 $dims=7435 #relative_position_bias.65=(64,64,6)f32 #7436=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_96 1 1 7436 relative_position_bias0.65 memory_format=torch.contiguous_format $input=7436 #7436=(6,64,64)f32 #relative_position_bias0.65=(6,64,64)f32 prim::Constant pnnx_6425 0 1 22029 value=1 torch.transpose torch.transpose_3033 3 1 k.65 7391 7392 7427 $input=k.65 $dim0=7391 $dim1=7392 #k.65=(36,6,64,32)f32 #7427=(36,6,32,64)f32 torch.matmul torch.matmul_2266 2 1 q0.65 7427 attn.131 $input=q0.65 $other=7427 #q0.65=(36,6,64,32)f32 #7427=(36,6,32,64)f32 #attn.131=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3279 2 1 relative_position_bias0.65 22028 7438 $input=relative_position_bias0.65 $dim=22028 #relative_position_bias0.65=(6,64,64)f32 #7438=(1,6,64,64)f32 aten::add pnnx_6426 3 1 attn.131 7438 22029 attn0.33 #attn.131=(36,6,64,64)f32 #7438=(1,6,64,64)f32 #attn0.33=(36,6,64,64)f32 prim::Constant pnnx_6427 0 1 22030 value=0 aten::size pnnx_6428 2 1 attn_mask.33 22030 7440 #attn_mask.33=(36,64,64)f32 prim::NumToTensor pnnx_6429 1 1 7440 other.33 aten::Int pnnx_6430 1 1 other.33 7442 prim::Constant pnnx_6431 0 1 22031 value=trunc aten::div pnnx_6432 3 1 B_.65 other.33 22031 7443 aten::Int pnnx_6433 1 1 7443 7444 prim::Constant pnnx_6434 0 1 22032 value=6 prim::ListConstruct pnnx_6435 5 1 7444 7442 22032 7411 7410 7445 
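The inverse mapping shows up after every attention: windows of shape (36, 8, 8, 192) are viewed back to (1, 6, 6, 8, 8, 192), permuted with (0,1,3,2,4,5) and reassembled into the (1, 48, 48, 192) map (for example Tensor.view_1292 -> Tensor.view_1293 -> torch.permute_2643 -> Tensor.view_1294 above, and the Tensor.view_1327/1328 chain just below); for the shifted blocks a torch.roll undoes the (-4, -4) shift applied before partitioning. A sketch under the same assumptions as above:

import torch

def window_reverse(windows: torch.Tensor, window_size: int = 8, H: int = 48, W: int = 48) -> torch.Tensor:
    # windows: (num_windows * B, window_size, window_size, C) = (36, 8, 8, 192)
    B = windows.shape[0] // ((H // window_size) * (W // window_size))
    x = windows.view(B, H // window_size, W // window_size, window_size, window_size, -1)
    x = x.permute(0, 1, 3, 2, 4, 5).contiguous()    # (1, 6, 6, 8, 8, C) -> (1, 6, 8, 6, 8, C)
    return x.view(B, H, W, -1)                      # (1, 48, 48, 192)

# Shifted blocks (the ones carrying an attn_mask attribute) roll by (-4, -4) on dims (1, 2)
# before window_partition and roll back by (+4, +4) afterwards, cf. torch.roll_2450 / torch.roll_2451:
# x = torch.roll(x, shifts=(-4, -4), dims=(1, 2))   # before partitioning
# x = torch.roll(x, shifts=(4, 4), dims=(1, 2))     # after window_reverse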
prim::Constant pnnx_6437 0 1 22033 value=1 prim::Constant pnnx_6439 0 1 22034 value=0 prim::Constant pnnx_6441 0 1 22035 value=1 Tensor.view Tensor.view_1325 2 1 attn0.33 7445 7446 $input=attn0.33 $shape=7445 #attn0.33=(36,6,64,64)f32 #7446=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3280 2 1 attn_mask.33 22033 7447 $input=attn_mask.33 $dim=22033 #attn_mask.33=(36,64,64)f32 #7447=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3281 2 1 7447 22034 7448 $input=7447 $dim=22034 #7447=(36,1,64,64)f32 #7448=(1,36,1,64,64)f32 aten::add pnnx_6442 3 1 7446 7448 22035 attn1.33 #7446=(1,36,6,64,64)f32 #7448=(1,36,1,64,64)f32 #attn1.33=(1,36,6,64,64)f32 prim::Constant pnnx_6443 0 1 22036 value=-1 prim::Constant pnnx_6444 0 1 22037 value=6 prim::ListConstruct pnnx_6445 4 1 22036 22037 7409 7408 7450 Tensor.view Tensor.view_1326 2 1 attn1.33 7450 input.149 $input=attn1.33 $shape=7450 #attn1.33=(1,36,6,64,64)f32 #input.149=(36,6,64,64)f32 nn.Softmax layers_dfe.5.residual_group.blocks.1.attn.softmax 1 1 input.149 7452 dim=-1 #input.149=(36,6,64,64)f32 #7452=(36,6,64,64)f32 nn.Dropout layers_dfe.5.residual_group.blocks.1.attn.attn_drop 1 1 7452 7453 #7452=(36,6,64,64)f32 #7453=(36,6,64,64)f32 Tensor.select Tensor.select_745 3 1 qkv0.65 22019 22020 v.65 $input=qkv0.65 $dim=22019 $index=22020 #qkv0.65=(3,36,6,64,32)f32 #v.65=(36,6,64,32)f32 prim::Constant pnnx_6448 0 1 22038 value=1 prim::Constant pnnx_6449 0 1 22039 value=2 torch.matmul torch.matmul_2267 2 1 7453 v.65 7454 $input=7453 $other=v.65 #7453=(36,6,64,64)f32 #v.65=(36,6,64,32)f32 #7454=(36,6,64,32)f32 prim::ListConstruct pnnx_6451 3 1 7403 7407 7415 7456 torch.transpose torch.transpose_3034 3 1 7454 22038 22039 7455 $input=7454 $dim0=22038 $dim1=22039 #7454=(36,6,64,32)f32 #7455=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_497 2 1 7455 7456 input0.69 $input=7455 $shape=7456 #7455=(36,64,6,32)f32 #input0.69=(36,64,192)f32 nn.Linear layers_dfe.5.residual_group.blocks.1.attn.proj 1 1 input0.69 7458 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.69=(36,64,192)f32 #7458=(36,64,192)f32 nn.Dropout layers_dfe.5.residual_group.blocks.1.attn.proj_drop 1 1 7458 7459 #7458=(36,64,192)f32 #7459=(36,64,192)f32 prim::Constant pnnx_6453 0 1 22040 value=-1 prim::Constant pnnx_6454 0 1 22041 value=8 prim::Constant pnnx_6455 0 1 22042 value=8 prim::ListConstruct pnnx_6456 4 1 22040 22041 22042 7349 7460 prim::Constant pnnx_6458 0 1 22043 value=8 prim::Constant pnnx_6459 0 1 22044 value=trunc aten::div pnnx_6460 3 1 H.1 22043 22044 7462 aten::Int pnnx_6461 1 1 7462 7463 prim::Constant pnnx_6462 0 1 22045 value=8 prim::Constant pnnx_6463 0 1 22046 value=trunc aten::div pnnx_6464 3 1 W.1 22045 22046 7464 aten::Int pnnx_6465 1 1 7464 7465 prim::Constant pnnx_6466 0 1 22047 value=1 prim::Constant pnnx_6467 0 1 22048 value=8 prim::Constant pnnx_6468 0 1 22049 value=8 prim::Constant pnnx_6469 0 1 22050 value=-1 prim::ListConstruct pnnx_6470 6 1 22047 7463 7465 22048 22049 22050 7466 prim::Constant pnnx_6472 0 1 22051 value=0 prim::Constant pnnx_6473 0 1 22052 value=1 prim::Constant pnnx_6474 0 1 22053 value=3 prim::Constant pnnx_6475 0 1 22054 value=2 prim::Constant pnnx_6476 0 1 22055 value=4 prim::Constant pnnx_6477 0 1 22056 value=5 prim::ListConstruct pnnx_6478 6 1 22051 22052 22053 22054 22055 22056 7468 Tensor.view Tensor.view_1327 2 1 7459 7460 windows.65 $input=7459 $shape=7460 #7459=(36,64,192)f32 #windows.65=(36,8,8,192)f32 Tensor.view Tensor.view_1328 2 1 windows.65 7466 x3.65 $input=windows.65 $shape=7466 
#windows.65=(36,8,8,192)f32 #x3.65=(1,6,6,8,8,192)f32 prim::Constant pnnx_6482 0 1 22058 value=1 prim::Constant pnnx_6483 0 1 22059 value=-1 prim::ListConstruct pnnx_6484 4 1 22058 193 433 22059 7471 torch.permute torch.permute_2655 2 1 x3.65 7468 7469 $input=x3.65 $dims=7468 #x3.65=(1,6,6,8,8,192)f32 #7469=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_97 1 1 7469 7470 memory_format=torch.contiguous_format $input=7469 #7469=(1,6,8,6,8,192)f32 #7470=(1,6,8,6,8,192)f32 prim::Constant pnnx_6486 0 1 22060 value=4 prim::Constant pnnx_6487 0 1 22061 value=4 prim::ListConstruct pnnx_6488 2 1 22060 22061 7473 prim::Constant pnnx_6489 0 1 22062 value=1 prim::Constant pnnx_6490 0 1 22063 value=2 prim::ListConstruct pnnx_6491 2 1 22062 22063 7474 Tensor.view Tensor.view_1329 2 1 7470 7471 shifted_x.33 $input=7470 $shape=7471 #7470=(1,6,8,6,8,192)f32 #shifted_x.33=(1,48,48,192)f32 aten::mul pnnx_6493 2 1 H.1 W.1 7476 aten::Int pnnx_6494 1 1 7476 7477 prim::ListConstruct pnnx_6495 3 1 7344 7477 7348 7478 prim::Constant pnnx_6497 0 1 7480 value=None prim::Constant pnnx_6498 0 1 22064 value=1 torch.roll torch.roll_2451 3 1 shifted_x.33 7473 7474 x4.65 $input=shifted_x.33 $shifts=7473 $dims=7474 #shifted_x.33=(1,48,48,192)f32 #x4.65=(1,48,48,192)f32 Tensor.view Tensor.view_1330 2 1 x4.65 7478 x5.33 $input=x4.65 $shape=7478 #x4.65=(1,48,48,192)f32 #x5.33=(1,2304,192)f32 aten::add pnnx_6499 3 1 7323 x5.33 22064 input.151 #7323=(1,2304,192)f32 #x5.33=(1,2304,192)f32 #input.151=(1,2304,192)f32 nn.LayerNorm layers_dfe.5.residual_group.blocks.1.norm2 1 1 input.151 7482 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.151=(1,2304,192)f32 #7482=(1,2304,192)f32 nn.Linear layers_dfe.5.residual_group.blocks.1.mlp.fc1 1 1 7482 7487 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #7482=(1,2304,192)f32 #7487=(1,2304,384)f32 nn.GELU layers_dfe.5.residual_group.blocks.1.mlp.act 1 1 7487 7488 #7487=(1,2304,384)f32 #7488=(1,2304,384)f32 nn.Dropout layers_dfe.5.residual_group.blocks.1.mlp.drop 1 1 7488 7489 #7488=(1,2304,384)f32 #7489=(1,2304,384)f32 nn.Linear layers_dfe.5.residual_group.blocks.1.mlp.fc2 1 1 7489 7490 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #7489=(1,2304,384)f32 #7490=(1,2304,192)f32 nn.Dropout layers_dfe.5.residual_group.blocks.1.mlp.drop 1 1 7490 7491 #7490=(1,2304,192)f32 #7491=(1,2304,192)f32 prim::Constant pnnx_6500 0 1 7492 value=None prim::Constant pnnx_6501 0 1 22065 value=1 aten::add pnnx_6502 3 1 input.151 7491 22065 7493 #input.151=(1,2304,192)f32 #7491=(1,2304,192)f32 #7493=(1,2304,192)f32 prim::Constant pnnx_6503 0 1 7494 value=trunc prim::Constant pnnx_6504 0 1 7495 value=8 prim::Constant pnnx_6505 0 1 7496 value=0 prim::Constant pnnx_6506 0 1 7497 value=2 prim::Constant pnnx_6507 0 1 7498 value=1 prim::Constant pnnx_6508 0 1 7499 value=3 prim::Constant pnnx_6509 0 1 7500 value=8 prim::Constant pnnx_6510 0 1 7501 value=4 prim::Constant pnnx_6511 0 1 7502 value=5 prim::Constant pnnx_6512 0 1 7503 value=-1 prim::Constant pnnx_6513 0 1 7504 value=64 aten::size pnnx_6514 2 1 7493 7496 7510 #7493=(1,2304,192)f32 prim::NumToTensor pnnx_6515 1 1 7510 B.81 aten::Int pnnx_6516 1 1 B.81 7512 aten::Int pnnx_6517 1 1 B.81 7513 aten::size pnnx_6518 2 1 7493 7497 7514 #7493=(1,2304,192)f32 prim::NumToTensor pnnx_6519 1 1 7514 C.139 aten::Int pnnx_6520 1 1 C.139 7516 aten::Int pnnx_6521 1 1 C.139 7517 aten::Int pnnx_6522 1 1 C.139 7518 aten::Int pnnx_6523 1 1 C.139 7519 
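Putting the pieces together: every layers_dfe.*.residual_group.blocks.N subgraph above wires the same two residual branches, norm1 -> (optional roll) -> window_partition -> attention -> window_reverse -> (optional unroll) -> add, then norm2 -> mlp.fc1 (192 -> 384) -> GELU -> mlp.fc2 (384 -> 192) -> add; the even-numbered blocks use shift 0 and the odd-numbered ones shift 4 with an attn_mask attribute. A compact sketch of that wiring, reusing the window_partition / window_reverse / window_attention sketches above (norm1, norm2, mlp and attn_fn stand for the corresponding modules; dropouts again omitted):

import torch

def swin_block(x, norm1, attn_fn, norm2, mlp, H=48, W=48, window_size=8, shift_size=0):
    # x: (B, H*W, C) = (1, 2304, 192); attn_fn closes over qkv/proj/bias tables/attn_mask
    B, L, C = x.shape
    shortcut = x
    x = norm1(x).view(B, H, W, C)
    if shift_size > 0:
        x = torch.roll(x, shifts=(-shift_size, -shift_size), dims=(1, 2))
    w = window_partition(x, window_size).view(-1, window_size * window_size, C)   # (36, 64, 192)
    w = attn_fn(w).view(-1, window_size, window_size, C)                          # (36, 8, 8, 192)
    x = window_reverse(w, window_size, H, W)
    if shift_size > 0:
        x = torch.roll(x, shifts=(shift_size, shift_size), dims=(1, 2))
    x = shortcut + x.view(B, H * W, C)      # first aten::add of the block
    return x + mlp(norm2(x))                # second aten::add of the block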
nn.LayerNorm layers_dfe.5.residual_group.blocks.2.norm1 1 1 7493 7520 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #7493=(1,2304,192)f32 #7520=(1,2304,192)f32 prim::ListConstruct pnnx_6524 4 1 7513 190 430 7519 7521 prim::Constant pnnx_6526 0 1 22066 value=0 Tensor.view Tensor.view_1331 2 1 7520 7521 x.67 $input=7520 $shape=7521 #7520=(1,2304,192)f32 #x.67=(1,48,48,192)f32 aten::size pnnx_6527 2 1 x.67 22066 7523 #x.67=(1,48,48,192)f32 prim::NumToTensor pnnx_6528 1 1 7523 B0.67 aten::Int pnnx_6529 1 1 B0.67 7525 aten::size pnnx_6530 2 1 x.67 7498 7526 #x.67=(1,48,48,192)f32 prim::NumToTensor pnnx_6531 1 1 7526 7527 prim::Constant pnnx_6532 0 1 22067 value=2 aten::size pnnx_6533 2 1 x.67 22067 7528 #x.67=(1,48,48,192)f32 prim::NumToTensor pnnx_6534 1 1 7528 7529 aten::size pnnx_6535 2 1 x.67 7499 7530 #x.67=(1,48,48,192)f32 prim::NumToTensor pnnx_6536 1 1 7530 C0.67 aten::Int pnnx_6537 1 1 C0.67 7532 aten::Int pnnx_6538 1 1 C0.67 7533 aten::div pnnx_6539 3 1 7527 7495 7494 7534 aten::Int pnnx_6540 1 1 7534 7535 prim::Constant pnnx_6541 0 1 22068 value=8 prim::Constant pnnx_6542 0 1 22069 value=trunc aten::div pnnx_6543 3 1 7529 22068 22069 7536 aten::Int pnnx_6544 1 1 7536 7537 prim::Constant pnnx_6545 0 1 22070 value=8 prim::ListConstruct pnnx_6546 6 1 7525 7535 7500 7537 22070 7533 7538 prim::Constant pnnx_6548 0 1 22071 value=0 prim::Constant pnnx_6549 0 1 22072 value=1 prim::Constant pnnx_6550 0 1 22073 value=3 prim::Constant pnnx_6551 0 1 22074 value=2 prim::ListConstruct pnnx_6552 6 1 22071 22072 22073 22074 7501 7502 7540 Tensor.view Tensor.view_1332 2 1 x.67 7538 x0.67 $input=x.67 $shape=7538 #x.67=(1,48,48,192)f32 #x0.67=(1,6,8,6,8,192)f32 prim::Constant pnnx_6556 0 1 22076 value=8 prim::Constant pnnx_6557 0 1 22077 value=8 prim::ListConstruct pnnx_6558 4 1 7503 22076 22077 7532 7543 torch.permute torch.permute_2656 2 1 x0.67 7540 7541 $input=x0.67 $dims=7540 #x0.67=(1,6,8,6,8,192)f32 #7541=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_98 1 1 7541 7542 memory_format=torch.contiguous_format $input=7541 #7541=(1,6,6,8,8,192)f32 #7542=(1,6,6,8,8,192)f32 prim::Constant pnnx_6560 0 1 22078 value=-1 prim::ListConstruct pnnx_6561 3 1 22078 7504 7518 7545 prim::Constant pnnx_6563 0 1 7547 value=1.767767e-01 prim::Constant pnnx_6564 0 1 7548 value=trunc prim::Constant pnnx_6565 0 1 7549 value=6 prim::Constant pnnx_6566 0 1 7550 value=0 prim::Constant pnnx_6567 0 1 7551 value=1 prim::Constant pnnx_6568 0 1 7552 value=2 prim::Constant pnnx_6569 0 1 7553 value=3 prim::Constant pnnx_6570 0 1 7554 value=6 prim::Constant pnnx_6571 0 1 7555 value=4 prim::Constant pnnx_6572 0 1 7556 value=-2 prim::Constant pnnx_6573 0 1 7557 value=-1 prim::Constant pnnx_6574 0 1 7558 value=64 pnnx.Attribute layers_dfe.5.residual_group.blocks.2.attn 0 1 relative_position_bias_table.67 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.67=(225,6)f32 pnnx.Attribute layers_dfe.5.residual_group.blocks.2.attn 0 1 relative_position_index.67 @relative_position_index=(64,64)i64 #relative_position_index.67=(64,64)i64 Tensor.view Tensor.view_1333 2 1 7542 7543 x_windows.67 $input=7542 $shape=7543 #7542=(1,6,6,8,8,192)f32 #x_windows.67=(36,8,8,192)f32 Tensor.view Tensor.view_1334 2 1 x_windows.67 7545 x1.67 $input=x_windows.67 $shape=7545 #x_windows.67=(36,8,8,192)f32 #x1.67=(36,64,192)f32 aten::size pnnx_6575 2 1 x1.67 7550 7566 #x1.67=(36,64,192)f32 prim::NumToTensor pnnx_6576 1 1 7566 B_.67 aten::Int pnnx_6577 1 1 B_.67 7568 aten::Int pnnx_6578 
1 1 B_.67 7569 aten::size pnnx_6579 2 1 x1.67 7551 7570 #x1.67=(36,64,192)f32 prim::NumToTensor pnnx_6580 1 1 7570 N.67 aten::Int pnnx_6581 1 1 N.67 7572 aten::Int pnnx_6582 1 1 N.67 7573 aten::size pnnx_6583 2 1 x1.67 7552 7574 #x1.67=(36,64,192)f32 prim::NumToTensor pnnx_6584 1 1 7574 C.141 aten::Int pnnx_6585 1 1 C.141 7576 nn.Linear layers_dfe.5.residual_group.blocks.2.attn.qkv 1 1 x1.67 7577 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.67=(36,64,192)f32 #7577=(36,64,576)f32 aten::div pnnx_6586 3 1 C.141 7549 7548 7578 aten::Int pnnx_6587 1 1 7578 7579 prim::ListConstruct pnnx_6588 5 1 7569 7573 7553 7554 7579 7580 prim::Constant pnnx_6590 0 1 22079 value=2 prim::Constant pnnx_6591 0 1 22080 value=0 prim::Constant pnnx_6592 0 1 22081 value=3 prim::Constant pnnx_6593 0 1 22082 value=1 prim::ListConstruct pnnx_6594 5 1 22079 22080 22081 22082 7555 7582 Tensor.reshape Tensor.reshape_498 2 1 7577 7580 7581 $input=7577 $shape=7580 #7577=(36,64,576)f32 #7581=(36,64,3,6,32)f32 prim::Constant pnnx_6596 0 1 22083 value=0 prim::Constant pnnx_6597 0 1 22084 value=0 prim::Constant pnnx_6599 0 1 22085 value=0 prim::Constant pnnx_6600 0 1 22086 value=1 prim::Constant pnnx_6602 0 1 22087 value=0 prim::Constant pnnx_6603 0 1 22088 value=2 torch.permute torch.permute_2657 2 1 7581 7582 qkv0.67 $input=7581 $dims=7582 #7581=(36,64,3,6,32)f32 #qkv0.67=(3,36,6,64,32)f32 Tensor.select Tensor.select_746 3 1 qkv0.67 22083 22084 q.67 $input=qkv0.67 $dim=22083 $index=22084 #qkv0.67=(3,36,6,64,32)f32 #q.67=(36,6,64,32)f32 aten::mul pnnx_6605 2 1 q.67 7547 q0.67 #q.67=(36,6,64,32)f32 #q0.67=(36,6,64,32)f32 Tensor.select Tensor.select_747 3 1 qkv0.67 22085 22086 k.67 $input=qkv0.67 $dim=22085 $index=22086 #qkv0.67=(3,36,6,64,32)f32 #k.67=(36,6,64,32)f32 prim::Constant pnnx_6608 0 1 22089 value=-1 prim::ListConstruct pnnx_6609 1 1 22089 7590 Tensor.view Tensor.view_1335 2 1 relative_position_index.67 7590 7591 $input=relative_position_index.67 $shape=7590 #relative_position_index.67=(64,64)i64 #7591=(4096)i64 prim::ListConstruct pnnx_6611 1 1 7591 7592 #7591=(4096)i64 prim::Constant pnnx_6613 0 1 22090 value=64 prim::Constant pnnx_6614 0 1 22091 value=-1 prim::ListConstruct pnnx_6615 3 1 7558 22090 22091 7594 Tensor.index Tensor.index_358 2 1 relative_position_bias_table.67 7592 7593 $input=relative_position_bias_table.67 $expr=7592 #relative_position_bias_table.67=(225,6)f32 #7593=(4096,6)f32 prim::Constant pnnx_6617 0 1 22092 value=2 prim::Constant pnnx_6618 0 1 22093 value=0 prim::Constant pnnx_6619 0 1 22094 value=1 prim::ListConstruct pnnx_6620 3 1 22092 22093 22094 7596 Tensor.view Tensor.view_1336 2 1 7593 7594 relative_position_bias.67 $input=7593 $shape=7594 #7593=(4096,6)f32 #relative_position_bias.67=(64,64,6)f32 prim::Constant pnnx_6624 0 1 22096 value=0 torch.permute torch.permute_2658 2 1 relative_position_bias.67 7596 7597 $input=relative_position_bias.67 $dims=7596 #relative_position_bias.67=(64,64,6)f32 #7597=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_99 1 1 7597 relative_position_bias0.67 memory_format=torch.contiguous_format $input=7597 #7597=(6,64,64)f32 #relative_position_bias0.67=(6,64,64)f32 prim::Constant pnnx_6626 0 1 22097 value=1 torch.transpose torch.transpose_3035 3 1 k.67 7556 7557 7588 $input=k.67 $dim0=7556 $dim1=7557 #k.67=(36,6,64,32)f32 #7588=(36,6,32,64)f32 torch.matmul torch.matmul_2268 2 1 q0.67 7588 attn.135 $input=q0.67 $other=7588 #q0.67=(36,6,64,32)f32 #7588=(36,6,32,64)f32 #attn.135=(36,6,64,64)f32 torch.unsqueeze 
torch.unsqueeze_3282 2 1 relative_position_bias0.67 22096 7599 $input=relative_position_bias0.67 $dim=22096 #relative_position_bias0.67=(6,64,64)f32 #7599=(1,6,64,64)f32 aten::add pnnx_6627 3 1 attn.135 7599 22097 input.153 #attn.135=(36,6,64,64)f32 #7599=(1,6,64,64)f32 #input.153=(36,6,64,64)f32 nn.Softmax layers_dfe.5.residual_group.blocks.2.attn.softmax 1 1 input.153 7601 dim=-1 #input.153=(36,6,64,64)f32 #7601=(36,6,64,64)f32 nn.Dropout layers_dfe.5.residual_group.blocks.2.attn.attn_drop 1 1 7601 7602 #7601=(36,6,64,64)f32 #7602=(36,6,64,64)f32 Tensor.select Tensor.select_748 3 1 qkv0.67 22087 22088 v.67 $input=qkv0.67 $dim=22087 $index=22088 #qkv0.67=(3,36,6,64,32)f32 #v.67=(36,6,64,32)f32 prim::Constant pnnx_6629 0 1 22098 value=1 prim::Constant pnnx_6630 0 1 22099 value=2 torch.matmul torch.matmul_2269 2 1 7602 v.67 7603 $input=7602 $other=v.67 #7602=(36,6,64,64)f32 #v.67=(36,6,64,32)f32 #7603=(36,6,64,32)f32 prim::ListConstruct pnnx_6632 3 1 7568 7572 7576 7605 torch.transpose torch.transpose_3036 3 1 7603 22098 22099 7604 $input=7603 $dim0=22098 $dim1=22099 #7603=(36,6,64,32)f32 #7604=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_499 2 1 7604 7605 input0.71 $input=7604 $shape=7605 #7604=(36,64,6,32)f32 #input0.71=(36,64,192)f32 nn.Linear layers_dfe.5.residual_group.blocks.2.attn.proj 1 1 input0.71 7607 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.71=(36,64,192)f32 #7607=(36,64,192)f32 nn.Dropout layers_dfe.5.residual_group.blocks.2.attn.proj_drop 1 1 7607 7608 #7607=(36,64,192)f32 #7608=(36,64,192)f32 prim::Constant pnnx_6634 0 1 22100 value=-1 prim::Constant pnnx_6635 0 1 22101 value=8 prim::Constant pnnx_6636 0 1 22102 value=8 prim::ListConstruct pnnx_6637 4 1 22100 22101 22102 7517 7609 prim::Constant pnnx_6639 0 1 22103 value=8 prim::Constant pnnx_6640 0 1 22104 value=trunc aten::div pnnx_6641 3 1 H.1 22103 22104 7611 aten::Int pnnx_6642 1 1 7611 7612 prim::Constant pnnx_6643 0 1 22105 value=8 prim::Constant pnnx_6644 0 1 22106 value=trunc aten::div pnnx_6645 3 1 W.1 22105 22106 7613 aten::Int pnnx_6646 1 1 7613 7614 prim::Constant pnnx_6647 0 1 22107 value=1 prim::Constant pnnx_6648 0 1 22108 value=8 prim::Constant pnnx_6649 0 1 22109 value=8 prim::Constant pnnx_6650 0 1 22110 value=-1 prim::ListConstruct pnnx_6651 6 1 22107 7612 7614 22108 22109 22110 7615 prim::Constant pnnx_6653 0 1 22111 value=0 prim::Constant pnnx_6654 0 1 22112 value=1 prim::Constant pnnx_6655 0 1 22113 value=3 prim::Constant pnnx_6656 0 1 22114 value=2 prim::Constant pnnx_6657 0 1 22115 value=4 prim::Constant pnnx_6658 0 1 22116 value=5 prim::ListConstruct pnnx_6659 6 1 22111 22112 22113 22114 22115 22116 7617 Tensor.view Tensor.view_1337 2 1 7608 7609 windows.67 $input=7608 $shape=7609 #7608=(36,64,192)f32 #windows.67=(36,8,8,192)f32 Tensor.view Tensor.view_1338 2 1 windows.67 7615 x2.67 $input=windows.67 $shape=7615 #windows.67=(36,8,8,192)f32 #x2.67=(1,6,6,8,8,192)f32 prim::Constant pnnx_6663 0 1 22118 value=1 prim::Constant pnnx_6664 0 1 22119 value=-1 prim::ListConstruct pnnx_6665 4 1 22118 187 427 22119 7620 torch.permute torch.permute_2659 2 1 x2.67 7617 7618 $input=x2.67 $dims=7617 #x2.67=(1,6,6,8,8,192)f32 #7618=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_100 1 1 7618 7619 memory_format=torch.contiguous_format $input=7618 #7618=(1,6,8,6,8,192)f32 #7619=(1,6,8,6,8,192)f32 aten::mul pnnx_6667 2 1 H.1 W.1 7622 aten::Int pnnx_6668 1 1 7622 7623 prim::ListConstruct pnnx_6669 3 1 7512 7623 7516 7624 prim::Constant pnnx_6671 0 1 7626 value=None 
prim::Constant pnnx_6672 0 1 22120 value=1 Tensor.view Tensor.view_1339 2 1 7619 7620 x3.67 $input=7619 $shape=7620 #7619=(1,6,8,6,8,192)f32 #x3.67=(1,48,48,192)f32 Tensor.view Tensor.view_1340 2 1 x3.67 7624 x4.67 $input=x3.67 $shape=7624 #x3.67=(1,48,48,192)f32 #x4.67=(1,2304,192)f32 aten::add pnnx_6673 3 1 7493 x4.67 22120 input.155 #7493=(1,2304,192)f32 #x4.67=(1,2304,192)f32 #input.155=(1,2304,192)f32 nn.LayerNorm layers_dfe.5.residual_group.blocks.2.norm2 1 1 input.155 7628 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.155=(1,2304,192)f32 #7628=(1,2304,192)f32 nn.Linear layers_dfe.5.residual_group.blocks.2.mlp.fc1 1 1 7628 7633 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #7628=(1,2304,192)f32 #7633=(1,2304,384)f32 nn.GELU layers_dfe.5.residual_group.blocks.2.mlp.act 1 1 7633 7634 #7633=(1,2304,384)f32 #7634=(1,2304,384)f32 nn.Dropout layers_dfe.5.residual_group.blocks.2.mlp.drop 1 1 7634 7635 #7634=(1,2304,384)f32 #7635=(1,2304,384)f32 nn.Linear layers_dfe.5.residual_group.blocks.2.mlp.fc2 1 1 7635 7636 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #7635=(1,2304,384)f32 #7636=(1,2304,192)f32 nn.Dropout layers_dfe.5.residual_group.blocks.2.mlp.drop 1 1 7636 7637 #7636=(1,2304,192)f32 #7637=(1,2304,192)f32 prim::Constant pnnx_6674 0 1 7638 value=None prim::Constant pnnx_6675 0 1 22121 value=1 aten::add pnnx_6676 3 1 input.155 7637 22121 7639 #input.155=(1,2304,192)f32 #7637=(1,2304,192)f32 #7639=(1,2304,192)f32 prim::Constant pnnx_6677 0 1 7640 value=trunc prim::Constant pnnx_6678 0 1 7641 value=8 prim::Constant pnnx_6679 0 1 7642 value=0 prim::Constant pnnx_6680 0 1 7643 value=2 prim::Constant pnnx_6681 0 1 7644 value=-4 prim::Constant pnnx_6682 0 1 7645 value=1 prim::Constant pnnx_6683 0 1 7646 value=3 prim::Constant pnnx_6684 0 1 7647 value=8 prim::Constant pnnx_6685 0 1 7648 value=4 prim::Constant pnnx_6686 0 1 7649 value=5 prim::Constant pnnx_6687 0 1 7650 value=-1 prim::Constant pnnx_6688 0 1 7651 value=64 pnnx.Attribute layers_dfe.5.residual_group.blocks.3 0 1 attn_mask.35 @attn_mask=(36,64,64)f32 #attn_mask.35=(36,64,64)f32 aten::size pnnx_6689 2 1 7639 7642 7658 #7639=(1,2304,192)f32 prim::NumToTensor pnnx_6690 1 1 7658 B.83 aten::Int pnnx_6691 1 1 B.83 7660 aten::Int pnnx_6692 1 1 B.83 7661 aten::size pnnx_6693 2 1 7639 7643 7662 #7639=(1,2304,192)f32 prim::NumToTensor pnnx_6694 1 1 7662 C.143 aten::Int pnnx_6695 1 1 C.143 7664 aten::Int pnnx_6696 1 1 C.143 7665 aten::Int pnnx_6697 1 1 C.143 7666 aten::Int pnnx_6698 1 1 C.143 7667 nn.LayerNorm layers_dfe.5.residual_group.blocks.3.norm1 1 1 7639 7668 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #7639=(1,2304,192)f32 #7668=(1,2304,192)f32 prim::ListConstruct pnnx_6699 4 1 7661 184 424 7667 7669 prim::Constant pnnx_6701 0 1 22122 value=-4 prim::ListConstruct pnnx_6702 2 1 7644 22122 7671 prim::Constant pnnx_6703 0 1 22123 value=2 prim::ListConstruct pnnx_6704 2 1 7645 22123 7672 Tensor.view Tensor.view_1341 2 1 7668 7669 x.69 $input=7668 $shape=7669 #7668=(1,2304,192)f32 #x.69=(1,48,48,192)f32 prim::Constant pnnx_6706 0 1 22124 value=0 torch.roll torch.roll_2452 3 1 x.69 7671 7672 x0.69 $input=x.69 $shifts=7671 $dims=7672 #x.69=(1,48,48,192)f32 #x0.69=(1,48,48,192)f32 aten::size pnnx_6707 2 1 x0.69 22124 7674 #x0.69=(1,48,48,192)f32 prim::NumToTensor pnnx_6708 1 1 7674 B0.69 aten::Int pnnx_6709 1 1 B0.69 7676 prim::Constant pnnx_6710 0 1 22125 value=1 
aten::size pnnx_6711 2 1 x0.69 22125 7677 #x0.69=(1,48,48,192)f32 prim::NumToTensor pnnx_6712 1 1 7677 7678 prim::Constant pnnx_6713 0 1 22126 value=2 aten::size pnnx_6714 2 1 x0.69 22126 7679 #x0.69=(1,48,48,192)f32 prim::NumToTensor pnnx_6715 1 1 7679 7680 aten::size pnnx_6716 2 1 x0.69 7646 7681 #x0.69=(1,48,48,192)f32 prim::NumToTensor pnnx_6717 1 1 7681 C0.69 aten::Int pnnx_6718 1 1 C0.69 7683 aten::Int pnnx_6719 1 1 C0.69 7684 aten::div pnnx_6720 3 1 7678 7641 7640 7685 aten::Int pnnx_6721 1 1 7685 7686 prim::Constant pnnx_6722 0 1 22127 value=8 prim::Constant pnnx_6723 0 1 22128 value=trunc aten::div pnnx_6724 3 1 7680 22127 22128 7687 aten::Int pnnx_6725 1 1 7687 7688 prim::Constant pnnx_6726 0 1 22129 value=8 prim::ListConstruct pnnx_6727 6 1 7676 7686 7647 7688 22129 7684 7689 prim::Constant pnnx_6729 0 1 22130 value=0 prim::Constant pnnx_6730 0 1 22131 value=1 prim::Constant pnnx_6731 0 1 22132 value=3 prim::Constant pnnx_6732 0 1 22133 value=2 prim::ListConstruct pnnx_6733 6 1 22130 22131 22132 22133 7648 7649 7691 Tensor.view Tensor.view_1342 2 1 x0.69 7689 x1.69 $input=x0.69 $shape=7689 #x0.69=(1,48,48,192)f32 #x1.69=(1,6,8,6,8,192)f32 prim::Constant pnnx_6737 0 1 22135 value=8 prim::Constant pnnx_6738 0 1 22136 value=8 prim::ListConstruct pnnx_6739 4 1 7650 22135 22136 7683 7694 torch.permute torch.permute_2660 2 1 x1.69 7691 7692 $input=x1.69 $dims=7691 #x1.69=(1,6,8,6,8,192)f32 #7692=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_101 1 1 7692 7693 memory_format=torch.contiguous_format $input=7692 #7692=(1,6,6,8,8,192)f32 #7693=(1,6,6,8,8,192)f32 prim::Constant pnnx_6741 0 1 22137 value=-1 prim::ListConstruct pnnx_6742 3 1 22137 7651 7666 7696 prim::Constant pnnx_6744 0 1 7698 value=1.767767e-01 prim::Constant pnnx_6745 0 1 7699 value=trunc prim::Constant pnnx_6746 0 1 7700 value=6 prim::Constant pnnx_6747 0 1 7701 value=0 prim::Constant pnnx_6748 0 1 7702 value=1 prim::Constant pnnx_6749 0 1 7703 value=2 prim::Constant pnnx_6750 0 1 7704 value=3 prim::Constant pnnx_6751 0 1 7705 value=6 prim::Constant pnnx_6752 0 1 7706 value=4 prim::Constant pnnx_6753 0 1 7707 value=-2 prim::Constant pnnx_6754 0 1 7708 value=-1 prim::Constant pnnx_6755 0 1 7709 value=64 pnnx.Attribute layers_dfe.5.residual_group.blocks.3.attn 0 1 relative_position_bias_table.69 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.69=(225,6)f32 pnnx.Attribute layers_dfe.5.residual_group.blocks.3.attn 0 1 relative_position_index.69 @relative_position_index=(64,64)i64 #relative_position_index.69=(64,64)i64 Tensor.view Tensor.view_1343 2 1 7693 7694 x_windows.69 $input=7693 $shape=7694 #7693=(1,6,6,8,8,192)f32 #x_windows.69=(36,8,8,192)f32 Tensor.view Tensor.view_1344 2 1 x_windows.69 7696 x2.69 $input=x_windows.69 $shape=7696 #x_windows.69=(36,8,8,192)f32 #x2.69=(36,64,192)f32 aten::size pnnx_6756 2 1 x2.69 7701 7717 #x2.69=(36,64,192)f32 prim::NumToTensor pnnx_6757 1 1 7717 B_.69 aten::Int pnnx_6758 1 1 B_.69 7719 aten::Int pnnx_6759 1 1 B_.69 7720 aten::size pnnx_6760 2 1 x2.69 7702 7721 #x2.69=(36,64,192)f32 prim::NumToTensor pnnx_6761 1 1 7721 N.69 aten::Int pnnx_6762 1 1 N.69 7723 aten::Int pnnx_6763 1 1 N.69 7724 aten::Int pnnx_6764 1 1 N.69 7725 aten::Int pnnx_6765 1 1 N.69 7726 aten::Int pnnx_6766 1 1 N.69 7727 aten::Int pnnx_6767 1 1 N.69 7728 aten::size pnnx_6768 2 1 x2.69 7703 7729 #x2.69=(36,64,192)f32 prim::NumToTensor pnnx_6769 1 1 7729 C.145 aten::Int pnnx_6770 1 1 C.145 7731 nn.Linear layers_dfe.5.residual_group.blocks.3.attn.qkv 1 1 x2.69 7732 bias=True 
in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.69=(36,64,192)f32 #7732=(36,64,576)f32 aten::div pnnx_6771 3 1 C.145 7700 7699 7733 aten::Int pnnx_6772 1 1 7733 7734 prim::ListConstruct pnnx_6773 5 1 7720 7728 7704 7705 7734 7735 prim::Constant pnnx_6775 0 1 22138 value=2 prim::Constant pnnx_6776 0 1 22139 value=0 prim::Constant pnnx_6777 0 1 22140 value=3 prim::Constant pnnx_6778 0 1 22141 value=1 prim::ListConstruct pnnx_6779 5 1 22138 22139 22140 22141 7706 7737 Tensor.reshape Tensor.reshape_500 2 1 7732 7735 7736 $input=7732 $shape=7735 #7732=(36,64,576)f32 #7736=(36,64,3,6,32)f32 prim::Constant pnnx_6781 0 1 22142 value=0 prim::Constant pnnx_6782 0 1 22143 value=0 prim::Constant pnnx_6784 0 1 22144 value=0 prim::Constant pnnx_6785 0 1 22145 value=1 prim::Constant pnnx_6787 0 1 22146 value=0 prim::Constant pnnx_6788 0 1 22147 value=2 torch.permute torch.permute_2661 2 1 7736 7737 qkv0.69 $input=7736 $dims=7737 #7736=(36,64,3,6,32)f32 #qkv0.69=(3,36,6,64,32)f32 Tensor.select Tensor.select_749 3 1 qkv0.69 22142 22143 q.69 $input=qkv0.69 $dim=22142 $index=22143 #qkv0.69=(3,36,6,64,32)f32 #q.69=(36,6,64,32)f32 aten::mul pnnx_6790 2 1 q.69 7698 q0.69 #q.69=(36,6,64,32)f32 #q0.69=(36,6,64,32)f32 Tensor.select Tensor.select_750 3 1 qkv0.69 22144 22145 k.69 $input=qkv0.69 $dim=22144 $index=22145 #qkv0.69=(3,36,6,64,32)f32 #k.69=(36,6,64,32)f32 prim::Constant pnnx_6793 0 1 22148 value=-1 prim::ListConstruct pnnx_6794 1 1 22148 7745 Tensor.view Tensor.view_1345 2 1 relative_position_index.69 7745 7746 $input=relative_position_index.69 $shape=7745 #relative_position_index.69=(64,64)i64 #7746=(4096)i64 prim::ListConstruct pnnx_6796 1 1 7746 7747 #7746=(4096)i64 prim::Constant pnnx_6798 0 1 22149 value=64 prim::Constant pnnx_6799 0 1 22150 value=-1 prim::ListConstruct pnnx_6800 3 1 7709 22149 22150 7749 Tensor.index Tensor.index_359 2 1 relative_position_bias_table.69 7747 7748 $input=relative_position_bias_table.69 $expr=7747 #relative_position_bias_table.69=(225,6)f32 #7748=(4096,6)f32 prim::Constant pnnx_6802 0 1 22151 value=2 prim::Constant pnnx_6803 0 1 22152 value=0 prim::Constant pnnx_6804 0 1 22153 value=1 prim::ListConstruct pnnx_6805 3 1 22151 22152 22153 7751 Tensor.view Tensor.view_1346 2 1 7748 7749 relative_position_bias.69 $input=7748 $shape=7749 #7748=(4096,6)f32 #relative_position_bias.69=(64,64,6)f32 prim::Constant pnnx_6809 0 1 22155 value=0 torch.permute torch.permute_2662 2 1 relative_position_bias.69 7751 7752 $input=relative_position_bias.69 $dims=7751 #relative_position_bias.69=(64,64,6)f32 #7752=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_102 1 1 7752 relative_position_bias0.69 memory_format=torch.contiguous_format $input=7752 #7752=(6,64,64)f32 #relative_position_bias0.69=(6,64,64)f32 prim::Constant pnnx_6811 0 1 22156 value=1 torch.transpose torch.transpose_3037 3 1 k.69 7707 7708 7743 $input=k.69 $dim0=7707 $dim1=7708 #k.69=(36,6,64,32)f32 #7743=(36,6,32,64)f32 torch.matmul torch.matmul_2270 2 1 q0.69 7743 attn.139 $input=q0.69 $other=7743 #q0.69=(36,6,64,32)f32 #7743=(36,6,32,64)f32 #attn.139=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3283 2 1 relative_position_bias0.69 22155 7754 $input=relative_position_bias0.69 $dim=22155 #relative_position_bias0.69=(6,64,64)f32 #7754=(1,6,64,64)f32 aten::add pnnx_6812 3 1 attn.139 7754 22156 attn0.35 #attn.139=(36,6,64,64)f32 #7754=(1,6,64,64)f32 #attn0.35=(36,6,64,64)f32 prim::Constant pnnx_6813 0 1 22157 value=0 aten::size pnnx_6814 2 1 attn_mask.35 22157 7756 #attn_mask.35=(36,64,64)f32 
prim::NumToTensor pnnx_6815 1 1 7756 other.35 aten::Int pnnx_6816 1 1 other.35 7758 prim::Constant pnnx_6817 0 1 22158 value=trunc aten::div pnnx_6818 3 1 B_.69 other.35 22158 7759 aten::Int pnnx_6819 1 1 7759 7760 prim::Constant pnnx_6820 0 1 22159 value=6 prim::ListConstruct pnnx_6821 5 1 7760 7758 22159 7727 7726 7761 prim::Constant pnnx_6823 0 1 22160 value=1 prim::Constant pnnx_6825 0 1 22161 value=0 prim::Constant pnnx_6827 0 1 22162 value=1 Tensor.view Tensor.view_1347 2 1 attn0.35 7761 7762 $input=attn0.35 $shape=7761 #attn0.35=(36,6,64,64)f32 #7762=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3284 2 1 attn_mask.35 22160 7763 $input=attn_mask.35 $dim=22160 #attn_mask.35=(36,64,64)f32 #7763=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3285 2 1 7763 22161 7764 $input=7763 $dim=22161 #7763=(36,1,64,64)f32 #7764=(1,36,1,64,64)f32 aten::add pnnx_6828 3 1 7762 7764 22162 attn1.35 #7762=(1,36,6,64,64)f32 #7764=(1,36,1,64,64)f32 #attn1.35=(1,36,6,64,64)f32 prim::Constant pnnx_6829 0 1 22163 value=-1 prim::Constant pnnx_6830 0 1 22164 value=6 prim::ListConstruct pnnx_6831 4 1 22163 22164 7725 7724 7766 Tensor.view Tensor.view_1348 2 1 attn1.35 7766 input.157 $input=attn1.35 $shape=7766 #attn1.35=(1,36,6,64,64)f32 #input.157=(36,6,64,64)f32 nn.Softmax layers_dfe.5.residual_group.blocks.3.attn.softmax 1 1 input.157 7768 dim=-1 #input.157=(36,6,64,64)f32 #7768=(36,6,64,64)f32 nn.Dropout layers_dfe.5.residual_group.blocks.3.attn.attn_drop 1 1 7768 7769 #7768=(36,6,64,64)f32 #7769=(36,6,64,64)f32 Tensor.select Tensor.select_751 3 1 qkv0.69 22146 22147 v.69 $input=qkv0.69 $dim=22146 $index=22147 #qkv0.69=(3,36,6,64,32)f32 #v.69=(36,6,64,32)f32 prim::Constant pnnx_6834 0 1 22165 value=1 prim::Constant pnnx_6835 0 1 22166 value=2 torch.matmul torch.matmul_2271 2 1 7769 v.69 7770 $input=7769 $other=v.69 #7769=(36,6,64,64)f32 #v.69=(36,6,64,32)f32 #7770=(36,6,64,32)f32 prim::ListConstruct pnnx_6837 3 1 7719 7723 7731 7772 torch.transpose torch.transpose_3038 3 1 7770 22165 22166 7771 $input=7770 $dim0=22165 $dim1=22166 #7770=(36,6,64,32)f32 #7771=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_501 2 1 7771 7772 input0.73 $input=7771 $shape=7772 #7771=(36,64,6,32)f32 #input0.73=(36,64,192)f32 nn.Linear layers_dfe.5.residual_group.blocks.3.attn.proj 1 1 input0.73 7774 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.73=(36,64,192)f32 #7774=(36,64,192)f32 nn.Dropout layers_dfe.5.residual_group.blocks.3.attn.proj_drop 1 1 7774 7775 #7774=(36,64,192)f32 #7775=(36,64,192)f32 prim::Constant pnnx_6839 0 1 22167 value=-1 prim::Constant pnnx_6840 0 1 22168 value=8 prim::Constant pnnx_6841 0 1 22169 value=8 prim::ListConstruct pnnx_6842 4 1 22167 22168 22169 7665 7776 prim::Constant pnnx_6844 0 1 22170 value=8 prim::Constant pnnx_6845 0 1 22171 value=trunc aten::div pnnx_6846 3 1 H.1 22170 22171 7778 aten::Int pnnx_6847 1 1 7778 7779 prim::Constant pnnx_6848 0 1 22172 value=8 prim::Constant pnnx_6849 0 1 22173 value=trunc aten::div pnnx_6850 3 1 W.1 22172 22173 7780 aten::Int pnnx_6851 1 1 7780 7781 prim::Constant pnnx_6852 0 1 22174 value=1 prim::Constant pnnx_6853 0 1 22175 value=8 prim::Constant pnnx_6854 0 1 22176 value=8 prim::Constant pnnx_6855 0 1 22177 value=-1 prim::ListConstruct pnnx_6856 6 1 22174 7779 7781 22175 22176 22177 7782 prim::Constant pnnx_6858 0 1 22178 value=0 prim::Constant pnnx_6859 0 1 22179 value=1 prim::Constant pnnx_6860 0 1 22180 value=3 prim::Constant pnnx_6861 0 1 22181 value=2 prim::Constant pnnx_6862 0 1 22182 value=4 prim::Constant 
pnnx_6863 0 1 22183 value=5 prim::ListConstruct pnnx_6864 6 1 22178 22179 22180 22181 22182 22183 7784 Tensor.view Tensor.view_1349 2 1 7775 7776 windows.69 $input=7775 $shape=7776 #7775=(36,64,192)f32 #windows.69=(36,8,8,192)f32 Tensor.view Tensor.view_1350 2 1 windows.69 7782 x3.69 $input=windows.69 $shape=7782 #windows.69=(36,8,8,192)f32 #x3.69=(1,6,6,8,8,192)f32 prim::Constant pnnx_6868 0 1 22185 value=1 prim::Constant pnnx_6869 0 1 22186 value=-1 prim::ListConstruct pnnx_6870 4 1 22185 181 421 22186 7787 torch.permute torch.permute_2663 2 1 x3.69 7784 7785 $input=x3.69 $dims=7784 #x3.69=(1,6,6,8,8,192)f32 #7785=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_103 1 1 7785 7786 memory_format=torch.contiguous_format $input=7785 #7785=(1,6,8,6,8,192)f32 #7786=(1,6,8,6,8,192)f32 prim::Constant pnnx_6872 0 1 22187 value=4 prim::Constant pnnx_6873 0 1 22188 value=4 prim::ListConstruct pnnx_6874 2 1 22187 22188 7789 prim::Constant pnnx_6875 0 1 22189 value=1 prim::Constant pnnx_6876 0 1 22190 value=2 prim::ListConstruct pnnx_6877 2 1 22189 22190 7790 Tensor.view Tensor.view_1351 2 1 7786 7787 shifted_x.35 $input=7786 $shape=7787 #7786=(1,6,8,6,8,192)f32 #shifted_x.35=(1,48,48,192)f32 aten::mul pnnx_6879 2 1 H.1 W.1 7792 aten::Int pnnx_6880 1 1 7792 7793 prim::ListConstruct pnnx_6881 3 1 7660 7793 7664 7794 prim::Constant pnnx_6883 0 1 7796 value=None prim::Constant pnnx_6884 0 1 22191 value=1 torch.roll torch.roll_2453 3 1 shifted_x.35 7789 7790 x4.69 $input=shifted_x.35 $shifts=7789 $dims=7790 #shifted_x.35=(1,48,48,192)f32 #x4.69=(1,48,48,192)f32 Tensor.view Tensor.view_1352 2 1 x4.69 7794 x5.35 $input=x4.69 $shape=7794 #x4.69=(1,48,48,192)f32 #x5.35=(1,2304,192)f32 aten::add pnnx_6885 3 1 7639 x5.35 22191 input.159 #7639=(1,2304,192)f32 #x5.35=(1,2304,192)f32 #input.159=(1,2304,192)f32 nn.LayerNorm layers_dfe.5.residual_group.blocks.3.norm2 1 1 input.159 7798 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.159=(1,2304,192)f32 #7798=(1,2304,192)f32 nn.Linear layers_dfe.5.residual_group.blocks.3.mlp.fc1 1 1 7798 7803 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #7798=(1,2304,192)f32 #7803=(1,2304,384)f32 nn.GELU layers_dfe.5.residual_group.blocks.3.mlp.act 1 1 7803 7804 #7803=(1,2304,384)f32 #7804=(1,2304,384)f32 nn.Dropout layers_dfe.5.residual_group.blocks.3.mlp.drop 1 1 7804 7805 #7804=(1,2304,384)f32 #7805=(1,2304,384)f32 nn.Linear layers_dfe.5.residual_group.blocks.3.mlp.fc2 1 1 7805 7806 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #7805=(1,2304,384)f32 #7806=(1,2304,192)f32 nn.Dropout layers_dfe.5.residual_group.blocks.3.mlp.drop 1 1 7806 7807 #7806=(1,2304,192)f32 #7807=(1,2304,192)f32 prim::Constant pnnx_6886 0 1 7808 value=None prim::Constant pnnx_6887 0 1 22192 value=1 aten::add pnnx_6888 3 1 input.159 7807 22192 7809 #input.159=(1,2304,192)f32 #7807=(1,2304,192)f32 #7809=(1,2304,192)f32 prim::Constant pnnx_6889 0 1 7810 value=trunc prim::Constant pnnx_6890 0 1 7811 value=8 prim::Constant pnnx_6891 0 1 7812 value=0 prim::Constant pnnx_6892 0 1 7813 value=2 prim::Constant pnnx_6893 0 1 7814 value=1 prim::Constant pnnx_6894 0 1 7815 value=3 prim::Constant pnnx_6895 0 1 7816 value=8 prim::Constant pnnx_6896 0 1 7817 value=4 prim::Constant pnnx_6897 0 1 7818 value=5 prim::Constant pnnx_6898 0 1 7819 value=-1 prim::Constant pnnx_6899 0 1 7820 value=64 aten::size pnnx_6900 2 1 7809 7812 7826 #7809=(1,2304,192)f32 prim::NumToTensor pnnx_6901 1 1 7826 B.85 
aten::Int pnnx_6902 1 1 B.85 7828 aten::Int pnnx_6903 1 1 B.85 7829 aten::size pnnx_6904 2 1 7809 7813 7830 #7809=(1,2304,192)f32 prim::NumToTensor pnnx_6905 1 1 7830 C.147 aten::Int pnnx_6906 1 1 C.147 7832 aten::Int pnnx_6907 1 1 C.147 7833 aten::Int pnnx_6908 1 1 C.147 7834 aten::Int pnnx_6909 1 1 C.147 7835 nn.LayerNorm layers_dfe.5.residual_group.blocks.4.norm1 1 1 7809 7836 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #7809=(1,2304,192)f32 #7836=(1,2304,192)f32 prim::ListConstruct pnnx_6910 4 1 7829 178 418 7835 7837 prim::Constant pnnx_6912 0 1 22193 value=0 Tensor.view Tensor.view_1353 2 1 7836 7837 x.71 $input=7836 $shape=7837 #7836=(1,2304,192)f32 #x.71=(1,48,48,192)f32 aten::size pnnx_6913 2 1 x.71 22193 7839 #x.71=(1,48,48,192)f32 prim::NumToTensor pnnx_6914 1 1 7839 B0.71 aten::Int pnnx_6915 1 1 B0.71 7841 aten::size pnnx_6916 2 1 x.71 7814 7842 #x.71=(1,48,48,192)f32 prim::NumToTensor pnnx_6917 1 1 7842 7843 prim::Constant pnnx_6918 0 1 22194 value=2 aten::size pnnx_6919 2 1 x.71 22194 7844 #x.71=(1,48,48,192)f32 prim::NumToTensor pnnx_6920 1 1 7844 7845 aten::size pnnx_6921 2 1 x.71 7815 7846 #x.71=(1,48,48,192)f32 prim::NumToTensor pnnx_6922 1 1 7846 C0.71 aten::Int pnnx_6923 1 1 C0.71 7848 aten::Int pnnx_6924 1 1 C0.71 7849 aten::div pnnx_6925 3 1 7843 7811 7810 7850 aten::Int pnnx_6926 1 1 7850 7851 prim::Constant pnnx_6927 0 1 22195 value=8 prim::Constant pnnx_6928 0 1 22196 value=trunc aten::div pnnx_6929 3 1 7845 22195 22196 7852 aten::Int pnnx_6930 1 1 7852 7853 prim::Constant pnnx_6931 0 1 22197 value=8 prim::ListConstruct pnnx_6932 6 1 7841 7851 7816 7853 22197 7849 7854 prim::Constant pnnx_6934 0 1 22198 value=0 prim::Constant pnnx_6935 0 1 22199 value=1 prim::Constant pnnx_6936 0 1 22200 value=3 prim::Constant pnnx_6937 0 1 22201 value=2 prim::ListConstruct pnnx_6938 6 1 22198 22199 22200 22201 7817 7818 7856 Tensor.view Tensor.view_1354 2 1 x.71 7854 x0.71 $input=x.71 $shape=7854 #x.71=(1,48,48,192)f32 #x0.71=(1,6,8,6,8,192)f32 prim::Constant pnnx_6942 0 1 22203 value=8 prim::Constant pnnx_6943 0 1 22204 value=8 prim::ListConstruct pnnx_6944 4 1 7819 22203 22204 7848 7859 torch.permute torch.permute_2664 2 1 x0.71 7856 7857 $input=x0.71 $dims=7856 #x0.71=(1,6,8,6,8,192)f32 #7857=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_104 1 1 7857 7858 memory_format=torch.contiguous_format $input=7857 #7857=(1,6,6,8,8,192)f32 #7858=(1,6,6,8,8,192)f32 prim::Constant pnnx_6946 0 1 22205 value=-1 prim::ListConstruct pnnx_6947 3 1 22205 7820 7834 7861 prim::Constant pnnx_6949 0 1 7863 value=1.767767e-01 prim::Constant pnnx_6950 0 1 7864 value=trunc prim::Constant pnnx_6951 0 1 7865 value=6 prim::Constant pnnx_6952 0 1 7866 value=0 prim::Constant pnnx_6953 0 1 7867 value=1 prim::Constant pnnx_6954 0 1 7868 value=2 prim::Constant pnnx_6955 0 1 7869 value=3 prim::Constant pnnx_6956 0 1 7870 value=6 prim::Constant pnnx_6957 0 1 7871 value=4 prim::Constant pnnx_6958 0 1 7872 value=-2 prim::Constant pnnx_6959 0 1 7873 value=-1 prim::Constant pnnx_6960 0 1 7874 value=64 pnnx.Attribute layers_dfe.5.residual_group.blocks.4.attn 0 1 relative_position_bias_table.71 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.71=(225,6)f32 pnnx.Attribute layers_dfe.5.residual_group.blocks.4.attn 0 1 relative_position_index.71 @relative_position_index=(64,64)i64 #relative_position_index.71=(64,64)i64 Tensor.view Tensor.view_1355 2 1 7858 7859 x_windows.71 $input=7858 $shape=7859 #7858=(1,6,6,8,8,192)f32 
#x_windows.71=(36,8,8,192)f32 Tensor.view Tensor.view_1356 2 1 x_windows.71 7861 x1.71 $input=x_windows.71 $shape=7861 #x_windows.71=(36,8,8,192)f32 #x1.71=(36,64,192)f32 aten::size pnnx_6961 2 1 x1.71 7866 7882 #x1.71=(36,64,192)f32 prim::NumToTensor pnnx_6962 1 1 7882 B_.71 aten::Int pnnx_6963 1 1 B_.71 7884 aten::Int pnnx_6964 1 1 B_.71 7885 aten::size pnnx_6965 2 1 x1.71 7867 7886 #x1.71=(36,64,192)f32 prim::NumToTensor pnnx_6966 1 1 7886 N.71 aten::Int pnnx_6967 1 1 N.71 7888 aten::Int pnnx_6968 1 1 N.71 7889 aten::size pnnx_6969 2 1 x1.71 7868 7890 #x1.71=(36,64,192)f32 prim::NumToTensor pnnx_6970 1 1 7890 C.149 aten::Int pnnx_6971 1 1 C.149 7892 nn.Linear layers_dfe.5.residual_group.blocks.4.attn.qkv 1 1 x1.71 7893 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.71=(36,64,192)f32 #7893=(36,64,576)f32 aten::div pnnx_6972 3 1 C.149 7865 7864 7894 aten::Int pnnx_6973 1 1 7894 7895 prim::ListConstruct pnnx_6974 5 1 7885 7889 7869 7870 7895 7896 prim::Constant pnnx_6976 0 1 22206 value=2 prim::Constant pnnx_6977 0 1 22207 value=0 prim::Constant pnnx_6978 0 1 22208 value=3 prim::Constant pnnx_6979 0 1 22209 value=1 prim::ListConstruct pnnx_6980 5 1 22206 22207 22208 22209 7871 7898 Tensor.reshape Tensor.reshape_502 2 1 7893 7896 7897 $input=7893 $shape=7896 #7893=(36,64,576)f32 #7897=(36,64,3,6,32)f32 prim::Constant pnnx_6982 0 1 22210 value=0 prim::Constant pnnx_6983 0 1 22211 value=0 prim::Constant pnnx_6985 0 1 22212 value=0 prim::Constant pnnx_6986 0 1 22213 value=1 prim::Constant pnnx_6988 0 1 22214 value=0 prim::Constant pnnx_6989 0 1 22215 value=2 torch.permute torch.permute_2665 2 1 7897 7898 qkv0.71 $input=7897 $dims=7898 #7897=(36,64,3,6,32)f32 #qkv0.71=(3,36,6,64,32)f32 Tensor.select Tensor.select_752 3 1 qkv0.71 22210 22211 q.71 $input=qkv0.71 $dim=22210 $index=22211 #qkv0.71=(3,36,6,64,32)f32 #q.71=(36,6,64,32)f32 aten::mul pnnx_6991 2 1 q.71 7863 q0.71 #q.71=(36,6,64,32)f32 #q0.71=(36,6,64,32)f32 Tensor.select Tensor.select_753 3 1 qkv0.71 22212 22213 k.71 $input=qkv0.71 $dim=22212 $index=22213 #qkv0.71=(3,36,6,64,32)f32 #k.71=(36,6,64,32)f32 prim::Constant pnnx_6994 0 1 22216 value=-1 prim::ListConstruct pnnx_6995 1 1 22216 7906 Tensor.view Tensor.view_1357 2 1 relative_position_index.71 7906 7907 $input=relative_position_index.71 $shape=7906 #relative_position_index.71=(64,64)i64 #7907=(4096)i64 prim::ListConstruct pnnx_6997 1 1 7907 7908 #7907=(4096)i64 prim::Constant pnnx_6999 0 1 22217 value=64 prim::Constant pnnx_7000 0 1 22218 value=-1 prim::ListConstruct pnnx_7001 3 1 7874 22217 22218 7910 Tensor.index Tensor.index_360 2 1 relative_position_bias_table.71 7908 7909 $input=relative_position_bias_table.71 $expr=7908 #relative_position_bias_table.71=(225,6)f32 #7909=(4096,6)f32 prim::Constant pnnx_7003 0 1 22219 value=2 prim::Constant pnnx_7004 0 1 22220 value=0 prim::Constant pnnx_7005 0 1 22221 value=1 prim::ListConstruct pnnx_7006 3 1 22219 22220 22221 7912 Tensor.view Tensor.view_1358 2 1 7909 7910 relative_position_bias.71 $input=7909 $shape=7910 #7909=(4096,6)f32 #relative_position_bias.71=(64,64,6)f32 prim::Constant pnnx_7010 0 1 22223 value=0 torch.permute torch.permute_2666 2 1 relative_position_bias.71 7912 7913 $input=relative_position_bias.71 $dims=7912 #relative_position_bias.71=(64,64,6)f32 #7913=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_105 1 1 7913 relative_position_bias0.71 memory_format=torch.contiguous_format $input=7913 #7913=(6,64,64)f32 #relative_position_bias0.71=(6,64,64)f32 prim::Constant pnnx_7012 0 1 
22224 value=1 torch.transpose torch.transpose_3039 3 1 k.71 7872 7873 7904 $input=k.71 $dim0=7872 $dim1=7873 #k.71=(36,6,64,32)f32 #7904=(36,6,32,64)f32 torch.matmul torch.matmul_2272 2 1 q0.71 7904 attn.143 $input=q0.71 $other=7904 #q0.71=(36,6,64,32)f32 #7904=(36,6,32,64)f32 #attn.143=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3286 2 1 relative_position_bias0.71 22223 7915 $input=relative_position_bias0.71 $dim=22223 #relative_position_bias0.71=(6,64,64)f32 #7915=(1,6,64,64)f32 aten::add pnnx_7013 3 1 attn.143 7915 22224 input.161 #attn.143=(36,6,64,64)f32 #7915=(1,6,64,64)f32 #input.161=(36,6,64,64)f32 nn.Softmax layers_dfe.5.residual_group.blocks.4.attn.softmax 1 1 input.161 7917 dim=-1 #input.161=(36,6,64,64)f32 #7917=(36,6,64,64)f32 nn.Dropout layers_dfe.5.residual_group.blocks.4.attn.attn_drop 1 1 7917 7918 #7917=(36,6,64,64)f32 #7918=(36,6,64,64)f32 Tensor.select Tensor.select_754 3 1 qkv0.71 22214 22215 v.71 $input=qkv0.71 $dim=22214 $index=22215 #qkv0.71=(3,36,6,64,32)f32 #v.71=(36,6,64,32)f32 prim::Constant pnnx_7015 0 1 22225 value=1 prim::Constant pnnx_7016 0 1 22226 value=2 torch.matmul torch.matmul_2273 2 1 7918 v.71 7919 $input=7918 $other=v.71 #7918=(36,6,64,64)f32 #v.71=(36,6,64,32)f32 #7919=(36,6,64,32)f32 prim::ListConstruct pnnx_7018 3 1 7884 7888 7892 7921 torch.transpose torch.transpose_3040 3 1 7919 22225 22226 7920 $input=7919 $dim0=22225 $dim1=22226 #7919=(36,6,64,32)f32 #7920=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_503 2 1 7920 7921 input0.75 $input=7920 $shape=7921 #7920=(36,64,6,32)f32 #input0.75=(36,64,192)f32 nn.Linear layers_dfe.5.residual_group.blocks.4.attn.proj 1 1 input0.75 7923 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.75=(36,64,192)f32 #7923=(36,64,192)f32 nn.Dropout layers_dfe.5.residual_group.blocks.4.attn.proj_drop 1 1 7923 7924 #7923=(36,64,192)f32 #7924=(36,64,192)f32 prim::Constant pnnx_7020 0 1 22227 value=-1 prim::Constant pnnx_7021 0 1 22228 value=8 prim::Constant pnnx_7022 0 1 22229 value=8 prim::ListConstruct pnnx_7023 4 1 22227 22228 22229 7833 7925 prim::Constant pnnx_7025 0 1 22230 value=8 prim::Constant pnnx_7026 0 1 22231 value=trunc aten::div pnnx_7027 3 1 H.1 22230 22231 7927 aten::Int pnnx_7028 1 1 7927 7928 prim::Constant pnnx_7029 0 1 22232 value=8 prim::Constant pnnx_7030 0 1 22233 value=trunc aten::div pnnx_7031 3 1 W.1 22232 22233 7929 aten::Int pnnx_7032 1 1 7929 7930 prim::Constant pnnx_7033 0 1 22234 value=1 prim::Constant pnnx_7034 0 1 22235 value=8 prim::Constant pnnx_7035 0 1 22236 value=8 prim::Constant pnnx_7036 0 1 22237 value=-1 prim::ListConstruct pnnx_7037 6 1 22234 7928 7930 22235 22236 22237 7931 prim::Constant pnnx_7039 0 1 22238 value=0 prim::Constant pnnx_7040 0 1 22239 value=1 prim::Constant pnnx_7041 0 1 22240 value=3 prim::Constant pnnx_7042 0 1 22241 value=2 prim::Constant pnnx_7043 0 1 22242 value=4 prim::Constant pnnx_7044 0 1 22243 value=5 prim::ListConstruct pnnx_7045 6 1 22238 22239 22240 22241 22242 22243 7933 Tensor.view Tensor.view_1359 2 1 7924 7925 windows.71 $input=7924 $shape=7925 #7924=(36,64,192)f32 #windows.71=(36,8,8,192)f32 Tensor.view Tensor.view_1360 2 1 windows.71 7931 x2.71 $input=windows.71 $shape=7931 #windows.71=(36,8,8,192)f32 #x2.71=(1,6,6,8,8,192)f32 prim::Constant pnnx_7049 0 1 22245 value=1 prim::Constant pnnx_7050 0 1 22246 value=-1 prim::ListConstruct pnnx_7051 4 1 22245 175 415 22246 7936 torch.permute torch.permute_2667 2 1 x2.71 7933 7934 $input=x2.71 $dims=7933 #x2.71=(1,6,6,8,8,192)f32 #7934=(1,6,8,6,8,192)f32 
Tensor.contiguous Tensor.contiguous_106 1 1 7934 7935 memory_format=torch.contiguous_format $input=7934 #7934=(1,6,8,6,8,192)f32 #7935=(1,6,8,6,8,192)f32 aten::mul pnnx_7053 2 1 H.1 W.1 7938 aten::Int pnnx_7054 1 1 7938 7939 prim::ListConstruct pnnx_7055 3 1 7828 7939 7832 7940 prim::Constant pnnx_7057 0 1 7942 value=None prim::Constant pnnx_7058 0 1 22247 value=1 Tensor.view Tensor.view_1361 2 1 7935 7936 x3.71 $input=7935 $shape=7936 #7935=(1,6,8,6,8,192)f32 #x3.71=(1,48,48,192)f32 Tensor.view Tensor.view_1362 2 1 x3.71 7940 x4.71 $input=x3.71 $shape=7940 #x3.71=(1,48,48,192)f32 #x4.71=(1,2304,192)f32 aten::add pnnx_7059 3 1 7809 x4.71 22247 input.163 #7809=(1,2304,192)f32 #x4.71=(1,2304,192)f32 #input.163=(1,2304,192)f32 nn.LayerNorm layers_dfe.5.residual_group.blocks.4.norm2 1 1 input.163 7944 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.163=(1,2304,192)f32 #7944=(1,2304,192)f32 nn.Linear layers_dfe.5.residual_group.blocks.4.mlp.fc1 1 1 7944 7949 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #7944=(1,2304,192)f32 #7949=(1,2304,384)f32 nn.GELU layers_dfe.5.residual_group.blocks.4.mlp.act 1 1 7949 7950 #7949=(1,2304,384)f32 #7950=(1,2304,384)f32 nn.Dropout layers_dfe.5.residual_group.blocks.4.mlp.drop 1 1 7950 7951 #7950=(1,2304,384)f32 #7951=(1,2304,384)f32 nn.Linear layers_dfe.5.residual_group.blocks.4.mlp.fc2 1 1 7951 7952 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #7951=(1,2304,384)f32 #7952=(1,2304,192)f32 nn.Dropout layers_dfe.5.residual_group.blocks.4.mlp.drop 1 1 7952 7953 #7952=(1,2304,192)f32 #7953=(1,2304,192)f32 prim::Constant pnnx_7060 0 1 7954 value=None prim::Constant pnnx_7061 0 1 22248 value=1 aten::add pnnx_7062 3 1 input.163 7953 22248 7955 #input.163=(1,2304,192)f32 #7953=(1,2304,192)f32 #7955=(1,2304,192)f32 prim::Constant pnnx_7063 0 1 7956 value=trunc prim::Constant pnnx_7064 0 1 7957 value=8 prim::Constant pnnx_7065 0 1 7958 value=0 prim::Constant pnnx_7066 0 1 7959 value=2 prim::Constant pnnx_7067 0 1 7960 value=-4 prim::Constant pnnx_7068 0 1 7961 value=1 prim::Constant pnnx_7069 0 1 7962 value=3 prim::Constant pnnx_7070 0 1 7963 value=8 prim::Constant pnnx_7071 0 1 7964 value=4 prim::Constant pnnx_7072 0 1 7965 value=5 prim::Constant pnnx_7073 0 1 7966 value=-1 prim::Constant pnnx_7074 0 1 7967 value=64 pnnx.Attribute layers_dfe.5.residual_group.blocks.5 0 1 attn_mask.37 @attn_mask=(36,64,64)f32 #attn_mask.37=(36,64,64)f32 aten::size pnnx_7075 2 1 7955 7958 7974 #7955=(1,2304,192)f32 prim::NumToTensor pnnx_7076 1 1 7974 B.87 aten::Int pnnx_7077 1 1 B.87 7976 aten::Int pnnx_7078 1 1 B.87 7977 aten::size pnnx_7079 2 1 7955 7959 7978 #7955=(1,2304,192)f32 prim::NumToTensor pnnx_7080 1 1 7978 C.151 aten::Int pnnx_7081 1 1 C.151 7980 aten::Int pnnx_7082 1 1 C.151 7981 aten::Int pnnx_7083 1 1 C.151 7982 aten::Int pnnx_7084 1 1 C.151 7983 nn.LayerNorm layers_dfe.5.residual_group.blocks.5.norm1 1 1 7955 7984 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #7955=(1,2304,192)f32 #7984=(1,2304,192)f32 prim::ListConstruct pnnx_7085 4 1 7977 172 412 7983 7985 prim::Constant pnnx_7087 0 1 22249 value=-4 prim::ListConstruct pnnx_7088 2 1 7960 22249 7987 prim::Constant pnnx_7089 0 1 22250 value=2 prim::ListConstruct pnnx_7090 2 1 7961 22250 7988 Tensor.view Tensor.view_1363 2 1 7984 7985 x.73 $input=7984 $shape=7985 #7984=(1,2304,192)f32 #x.73=(1,48,48,192)f32 prim::Constant pnnx_7092 0 1 22251 value=0 
torch.roll torch.roll_2454 3 1 x.73 7987 7988 x0.73 $input=x.73 $shifts=7987 $dims=7988 #x.73=(1,48,48,192)f32 #x0.73=(1,48,48,192)f32 aten::size pnnx_7093 2 1 x0.73 22251 7990 #x0.73=(1,48,48,192)f32 prim::NumToTensor pnnx_7094 1 1 7990 B0.73 aten::Int pnnx_7095 1 1 B0.73 7992 prim::Constant pnnx_7096 0 1 22252 value=1 aten::size pnnx_7097 2 1 x0.73 22252 7993 #x0.73=(1,48,48,192)f32 prim::NumToTensor pnnx_7098 1 1 7993 7994 prim::Constant pnnx_7099 0 1 22253 value=2 aten::size pnnx_7100 2 1 x0.73 22253 7995 #x0.73=(1,48,48,192)f32 prim::NumToTensor pnnx_7101 1 1 7995 7996 aten::size pnnx_7102 2 1 x0.73 7962 7997 #x0.73=(1,48,48,192)f32 prim::NumToTensor pnnx_7103 1 1 7997 C0.73 aten::Int pnnx_7104 1 1 C0.73 7999 aten::Int pnnx_7105 1 1 C0.73 8000 aten::div pnnx_7106 3 1 7994 7957 7956 8001 aten::Int pnnx_7107 1 1 8001 8002 prim::Constant pnnx_7108 0 1 22254 value=8 prim::Constant pnnx_7109 0 1 22255 value=trunc aten::div pnnx_7110 3 1 7996 22254 22255 8003 aten::Int pnnx_7111 1 1 8003 8004 prim::Constant pnnx_7112 0 1 22256 value=8 prim::ListConstruct pnnx_7113 6 1 7992 8002 7963 8004 22256 8000 8005 prim::Constant pnnx_7115 0 1 22257 value=0 prim::Constant pnnx_7116 0 1 22258 value=1 prim::Constant pnnx_7117 0 1 22259 value=3 prim::Constant pnnx_7118 0 1 22260 value=2 prim::ListConstruct pnnx_7119 6 1 22257 22258 22259 22260 7964 7965 8007 Tensor.view Tensor.view_1364 2 1 x0.73 8005 x1.73 $input=x0.73 $shape=8005 #x0.73=(1,48,48,192)f32 #x1.73=(1,6,8,6,8,192)f32 prim::Constant pnnx_7123 0 1 22262 value=8 prim::Constant pnnx_7124 0 1 22263 value=8 prim::ListConstruct pnnx_7125 4 1 7966 22262 22263 7999 8010 torch.permute torch.permute_2668 2 1 x1.73 8007 8008 $input=x1.73 $dims=8007 #x1.73=(1,6,8,6,8,192)f32 #8008=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_107 1 1 8008 8009 memory_format=torch.contiguous_format $input=8008 #8008=(1,6,6,8,8,192)f32 #8009=(1,6,6,8,8,192)f32 prim::Constant pnnx_7127 0 1 22264 value=-1 prim::ListConstruct pnnx_7128 3 1 22264 7967 7982 8012 prim::Constant pnnx_7130 0 1 8014 value=1.767767e-01 prim::Constant pnnx_7131 0 1 8015 value=trunc prim::Constant pnnx_7132 0 1 8016 value=6 prim::Constant pnnx_7133 0 1 8017 value=0 prim::Constant pnnx_7134 0 1 8018 value=1 prim::Constant pnnx_7135 0 1 8019 value=2 prim::Constant pnnx_7136 0 1 8020 value=3 prim::Constant pnnx_7137 0 1 8021 value=6 prim::Constant pnnx_7138 0 1 8022 value=4 prim::Constant pnnx_7139 0 1 8023 value=-2 prim::Constant pnnx_7140 0 1 8024 value=-1 prim::Constant pnnx_7141 0 1 8025 value=64 pnnx.Attribute layers_dfe.5.residual_group.blocks.5.attn 0 1 relative_position_bias_table.73 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.73=(225,6)f32 pnnx.Attribute layers_dfe.5.residual_group.blocks.5.attn 0 1 relative_position_index.73 @relative_position_index=(64,64)i64 #relative_position_index.73=(64,64)i64 Tensor.view Tensor.view_1365 2 1 8009 8010 x_windows.73 $input=8009 $shape=8010 #8009=(1,6,6,8,8,192)f32 #x_windows.73=(36,8,8,192)f32 Tensor.view Tensor.view_1366 2 1 x_windows.73 8012 x2.73 $input=x_windows.73 $shape=8012 #x_windows.73=(36,8,8,192)f32 #x2.73=(36,64,192)f32 aten::size pnnx_7142 2 1 x2.73 8017 8033 #x2.73=(36,64,192)f32 prim::NumToTensor pnnx_7143 1 1 8033 B_.73 aten::Int pnnx_7144 1 1 B_.73 8035 aten::Int pnnx_7145 1 1 B_.73 8036 aten::size pnnx_7146 2 1 x2.73 8018 8037 #x2.73=(36,64,192)f32 prim::NumToTensor pnnx_7147 1 1 8037 N.73 aten::Int pnnx_7148 1 1 N.73 8039 aten::Int pnnx_7149 1 1 N.73 8040 aten::Int pnnx_7150 1 1 N.73 8041 aten::Int 
pnnx_7151 1 1 N.73 8042 aten::Int pnnx_7152 1 1 N.73 8043 aten::Int pnnx_7153 1 1 N.73 8044 aten::size pnnx_7154 2 1 x2.73 8019 8045 #x2.73=(36,64,192)f32 prim::NumToTensor pnnx_7155 1 1 8045 C.153 aten::Int pnnx_7156 1 1 C.153 8047 nn.Linear layers_dfe.5.residual_group.blocks.5.attn.qkv 1 1 x2.73 8048 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.73=(36,64,192)f32 #8048=(36,64,576)f32 aten::div pnnx_7157 3 1 C.153 8016 8015 8049 aten::Int pnnx_7158 1 1 8049 8050 prim::ListConstruct pnnx_7159 5 1 8036 8044 8020 8021 8050 8051 prim::Constant pnnx_7161 0 1 22265 value=2 prim::Constant pnnx_7162 0 1 22266 value=0 prim::Constant pnnx_7163 0 1 22267 value=3 prim::Constant pnnx_7164 0 1 22268 value=1 prim::ListConstruct pnnx_7165 5 1 22265 22266 22267 22268 8022 8053 Tensor.reshape Tensor.reshape_504 2 1 8048 8051 8052 $input=8048 $shape=8051 #8048=(36,64,576)f32 #8052=(36,64,3,6,32)f32 prim::Constant pnnx_7167 0 1 22269 value=0 prim::Constant pnnx_7168 0 1 22270 value=0 prim::Constant pnnx_7170 0 1 22271 value=0 prim::Constant pnnx_7171 0 1 22272 value=1 prim::Constant pnnx_7173 0 1 22273 value=0 prim::Constant pnnx_7174 0 1 22274 value=2 torch.permute torch.permute_2669 2 1 8052 8053 qkv0.73 $input=8052 $dims=8053 #8052=(36,64,3,6,32)f32 #qkv0.73=(3,36,6,64,32)f32 Tensor.select Tensor.select_755 3 1 qkv0.73 22269 22270 q.73 $input=qkv0.73 $dim=22269 $index=22270 #qkv0.73=(3,36,6,64,32)f32 #q.73=(36,6,64,32)f32 aten::mul pnnx_7176 2 1 q.73 8014 q0.73 #q.73=(36,6,64,32)f32 #q0.73=(36,6,64,32)f32 Tensor.select Tensor.select_756 3 1 qkv0.73 22271 22272 k.73 $input=qkv0.73 $dim=22271 $index=22272 #qkv0.73=(3,36,6,64,32)f32 #k.73=(36,6,64,32)f32 prim::Constant pnnx_7179 0 1 22275 value=-1 prim::ListConstruct pnnx_7180 1 1 22275 8061 Tensor.view Tensor.view_1367 2 1 relative_position_index.73 8061 8062 $input=relative_position_index.73 $shape=8061 #relative_position_index.73=(64,64)i64 #8062=(4096)i64 prim::ListConstruct pnnx_7182 1 1 8062 8063 #8062=(4096)i64 prim::Constant pnnx_7184 0 1 22276 value=64 prim::Constant pnnx_7185 0 1 22277 value=-1 prim::ListConstruct pnnx_7186 3 1 8025 22276 22277 8065 Tensor.index Tensor.index_361 2 1 relative_position_bias_table.73 8063 8064 $input=relative_position_bias_table.73 $expr=8063 #relative_position_bias_table.73=(225,6)f32 #8064=(4096,6)f32 prim::Constant pnnx_7188 0 1 22278 value=2 prim::Constant pnnx_7189 0 1 22279 value=0 prim::Constant pnnx_7190 0 1 22280 value=1 prim::ListConstruct pnnx_7191 3 1 22278 22279 22280 8067 Tensor.view Tensor.view_1368 2 1 8064 8065 relative_position_bias.73 $input=8064 $shape=8065 #8064=(4096,6)f32 #relative_position_bias.73=(64,64,6)f32 prim::Constant pnnx_7195 0 1 22282 value=0 torch.permute torch.permute_2670 2 1 relative_position_bias.73 8067 8068 $input=relative_position_bias.73 $dims=8067 #relative_position_bias.73=(64,64,6)f32 #8068=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_108 1 1 8068 relative_position_bias0.73 memory_format=torch.contiguous_format $input=8068 #8068=(6,64,64)f32 #relative_position_bias0.73=(6,64,64)f32 prim::Constant pnnx_7197 0 1 22283 value=1 torch.transpose torch.transpose_3041 3 1 k.73 8023 8024 8059 $input=k.73 $dim0=8023 $dim1=8024 #k.73=(36,6,64,32)f32 #8059=(36,6,32,64)f32 torch.matmul torch.matmul_2274 2 1 q0.73 8059 attn.147 $input=q0.73 $other=8059 #q0.73=(36,6,64,32)f32 #8059=(36,6,32,64)f32 #attn.147=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3287 2 1 relative_position_bias0.73 22282 8070 $input=relative_position_bias0.73 $dim=22282 
#relative_position_bias0.73=(6,64,64)f32 #8070=(1,6,64,64)f32 aten::add pnnx_7198 3 1 attn.147 8070 22283 attn0.37 #attn.147=(36,6,64,64)f32 #8070=(1,6,64,64)f32 #attn0.37=(36,6,64,64)f32 prim::Constant pnnx_7199 0 1 22284 value=0 aten::size pnnx_7200 2 1 attn_mask.37 22284 8072 #attn_mask.37=(36,64,64)f32 prim::NumToTensor pnnx_7201 1 1 8072 other.37 aten::Int pnnx_7202 1 1 other.37 8074 prim::Constant pnnx_7203 0 1 22285 value=trunc aten::div pnnx_7204 3 1 B_.73 other.37 22285 8075 aten::Int pnnx_7205 1 1 8075 8076 prim::Constant pnnx_7206 0 1 22286 value=6 prim::ListConstruct pnnx_7207 5 1 8076 8074 22286 8043 8042 8077 prim::Constant pnnx_7209 0 1 22287 value=1 prim::Constant pnnx_7211 0 1 22288 value=0 prim::Constant pnnx_7213 0 1 22289 value=1 Tensor.view Tensor.view_1369 2 1 attn0.37 8077 8078 $input=attn0.37 $shape=8077 #attn0.37=(36,6,64,64)f32 #8078=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3288 2 1 attn_mask.37 22287 8079 $input=attn_mask.37 $dim=22287 #attn_mask.37=(36,64,64)f32 #8079=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3289 2 1 8079 22288 8080 $input=8079 $dim=22288 #8079=(36,1,64,64)f32 #8080=(1,36,1,64,64)f32 aten::add pnnx_7214 3 1 8078 8080 22289 attn1.37 #8078=(1,36,6,64,64)f32 #8080=(1,36,1,64,64)f32 #attn1.37=(1,36,6,64,64)f32 prim::Constant pnnx_7215 0 1 22290 value=-1 prim::Constant pnnx_7216 0 1 22291 value=6 prim::ListConstruct pnnx_7217 4 1 22290 22291 8041 8040 8082 Tensor.view Tensor.view_1370 2 1 attn1.37 8082 input.165 $input=attn1.37 $shape=8082 #attn1.37=(1,36,6,64,64)f32 #input.165=(36,6,64,64)f32 nn.Softmax layers_dfe.5.residual_group.blocks.5.attn.softmax 1 1 input.165 8084 dim=-1 #input.165=(36,6,64,64)f32 #8084=(36,6,64,64)f32 nn.Dropout layers_dfe.5.residual_group.blocks.5.attn.attn_drop 1 1 8084 8085 #8084=(36,6,64,64)f32 #8085=(36,6,64,64)f32 Tensor.select Tensor.select_757 3 1 qkv0.73 22273 22274 v.73 $input=qkv0.73 $dim=22273 $index=22274 #qkv0.73=(3,36,6,64,32)f32 #v.73=(36,6,64,32)f32 prim::Constant pnnx_7220 0 1 22292 value=1 prim::Constant pnnx_7221 0 1 22293 value=2 torch.matmul torch.matmul_2275 2 1 8085 v.73 8086 $input=8085 $other=v.73 #8085=(36,6,64,64)f32 #v.73=(36,6,64,32)f32 #8086=(36,6,64,32)f32 prim::ListConstruct pnnx_7223 3 1 8035 8039 8047 8088 torch.transpose torch.transpose_3042 3 1 8086 22292 22293 8087 $input=8086 $dim0=22292 $dim1=22293 #8086=(36,6,64,32)f32 #8087=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_505 2 1 8087 8088 input0.65 $input=8087 $shape=8088 #8087=(36,64,6,32)f32 #input0.65=(36,64,192)f32 nn.Linear layers_dfe.5.residual_group.blocks.5.attn.proj 1 1 input0.65 8090 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.65=(36,64,192)f32 #8090=(36,64,192)f32 nn.Dropout layers_dfe.5.residual_group.blocks.5.attn.proj_drop 1 1 8090 8091 #8090=(36,64,192)f32 #8091=(36,64,192)f32 prim::Constant pnnx_7225 0 1 22294 value=-1 prim::Constant pnnx_7226 0 1 22295 value=8 prim::Constant pnnx_7227 0 1 22296 value=8 prim::ListConstruct pnnx_7228 4 1 22294 22295 22296 7981 8092 prim::Constant pnnx_7230 0 1 22297 value=8 prim::Constant pnnx_7231 0 1 22298 value=trunc aten::div pnnx_7232 3 1 H.1 22297 22298 8094 aten::Int pnnx_7233 1 1 8094 8095 prim::Constant pnnx_7234 0 1 22299 value=8 prim::Constant pnnx_7235 0 1 22300 value=trunc aten::div pnnx_7236 3 1 W.1 22299 22300 8096 aten::Int pnnx_7237 1 1 8096 8097 prim::Constant pnnx_7238 0 1 22301 value=1 prim::Constant pnnx_7239 0 1 22302 value=8 prim::Constant pnnx_7240 0 1 22303 value=8 prim::Constant pnnx_7241 0 1 22304 
value=-1 prim::ListConstruct pnnx_7242 6 1 22301 8095 8097 22302 22303 22304 8098 prim::Constant pnnx_7244 0 1 22305 value=0 prim::Constant pnnx_7245 0 1 22306 value=1 prim::Constant pnnx_7246 0 1 22307 value=3 prim::Constant pnnx_7247 0 1 22308 value=2 prim::Constant pnnx_7248 0 1 22309 value=4 prim::Constant pnnx_7249 0 1 22310 value=5 prim::ListConstruct pnnx_7250 6 1 22305 22306 22307 22308 22309 22310 8100 Tensor.view Tensor.view_1371 2 1 8091 8092 windows.73 $input=8091 $shape=8092 #8091=(36,64,192)f32 #windows.73=(36,8,8,192)f32 Tensor.view Tensor.view_1372 2 1 windows.73 8098 x3.73 $input=windows.73 $shape=8098 #windows.73=(36,8,8,192)f32 #x3.73=(1,6,6,8,8,192)f32 prim::Constant pnnx_7254 0 1 22312 value=1 prim::Constant pnnx_7255 0 1 22313 value=-1 prim::ListConstruct pnnx_7256 4 1 22312 169 409 22313 8103 torch.permute torch.permute_2671 2 1 x3.73 8100 8101 $input=x3.73 $dims=8100 #x3.73=(1,6,6,8,8,192)f32 #8101=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_109 1 1 8101 8102 memory_format=torch.contiguous_format $input=8101 #8101=(1,6,8,6,8,192)f32 #8102=(1,6,8,6,8,192)f32 prim::Constant pnnx_7258 0 1 22314 value=4 prim::Constant pnnx_7259 0 1 22315 value=4 prim::ListConstruct pnnx_7260 2 1 22314 22315 8105 prim::Constant pnnx_7261 0 1 22316 value=1 prim::Constant pnnx_7262 0 1 22317 value=2 prim::ListConstruct pnnx_7263 2 1 22316 22317 8106 Tensor.view Tensor.view_1373 2 1 8102 8103 shifted_x.37 $input=8102 $shape=8103 #8102=(1,6,8,6,8,192)f32 #shifted_x.37=(1,48,48,192)f32 aten::mul pnnx_7265 2 1 H.1 W.1 8108 aten::Int pnnx_7266 1 1 8108 8109 prim::ListConstruct pnnx_7267 3 1 7976 8109 7980 8110 prim::Constant pnnx_7269 0 1 8112 value=None prim::Constant pnnx_7270 0 1 22318 value=1 torch.roll torch.roll_2455 3 1 shifted_x.37 8105 8106 x4.73 $input=shifted_x.37 $shifts=8105 $dims=8106 #shifted_x.37=(1,48,48,192)f32 #x4.73=(1,48,48,192)f32 Tensor.view Tensor.view_1374 2 1 x4.73 8110 x5.37 $input=x4.73 $shape=8110 #x4.73=(1,48,48,192)f32 #x5.37=(1,2304,192)f32 aten::add pnnx_7271 3 1 7955 x5.37 22318 input.167 #7955=(1,2304,192)f32 #x5.37=(1,2304,192)f32 #input.167=(1,2304,192)f32 nn.LayerNorm layers_dfe.5.residual_group.blocks.5.norm2 1 1 input.167 8114 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.167=(1,2304,192)f32 #8114=(1,2304,192)f32 nn.Linear layers_dfe.5.residual_group.blocks.5.mlp.fc1 1 1 8114 8119 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #8114=(1,2304,192)f32 #8119=(1,2304,384)f32 nn.GELU layers_dfe.5.residual_group.blocks.5.mlp.act 1 1 8119 8120 #8119=(1,2304,384)f32 #8120=(1,2304,384)f32 nn.Dropout layers_dfe.5.residual_group.blocks.5.mlp.drop 1 1 8120 8121 #8120=(1,2304,384)f32 #8121=(1,2304,384)f32 nn.Linear layers_dfe.5.residual_group.blocks.5.mlp.fc2 1 1 8121 8122 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #8121=(1,2304,384)f32 #8122=(1,2304,192)f32 nn.Dropout layers_dfe.5.residual_group.blocks.5.mlp.drop 1 1 8122 8123 #8122=(1,2304,192)f32 #8123=(1,2304,192)f32 prim::Constant pnnx_7272 0 1 8124 value=None prim::Constant pnnx_7273 0 1 22319 value=1 aten::add pnnx_7274 3 1 input.167 8123 22319 8125 #input.167=(1,2304,192)f32 #8123=(1,2304,192)f32 #8125=(1,2304,192)f32 prim::Constant pnnx_7275 0 1 8126 value=0 prim::Constant pnnx_7276 0 1 8127 value=1 prim::Constant pnnx_7277 0 1 8128 value=2 prim::Constant pnnx_7278 0 1 8129 value=192 aten::size pnnx_7279 2 1 8125 8126 8130 #8125=(1,2304,192)f32 prim::NumToTensor pnnx_7280 1 1 
8130 B.75 aten::Int pnnx_7281 1 1 B.75 8132 prim::ListConstruct pnnx_7283 4 1 8132 8129 166 406 8134 torch.transpose torch.transpose_3043 3 1 8125 8127 8128 8133 $input=8125 $dim0=8127 $dim1=8128 #8125=(1,2304,192)f32 #8133=(1,192,2304)f32 Tensor.view Tensor.view_1375 2 1 8133 8134 input.141 $input=8133 $shape=8134 #8133=(1,192,2304)f32 #input.141=(1,192,48,48)f32 nn.Conv2d layers_dfe.5.conv 1 1 input.141 8136 bias=True dilation=(1,1) groups=1 in_channels=192 kernel_size=(3,3) out_channels=192 padding=(1,1) padding_mode=zeros stride=(1,1) @bias=(192)f32 @weight=(192,192,3,3)f32 #input.141=(1,192,48,48)f32 #8136=(1,192,48,48)f32 prim::Constant pnnx_7285 0 1 8137 value=-1 prim::Constant pnnx_7286 0 1 8138 value=2 prim::Constant pnnx_7287 0 1 8139 value=1 prim::Constant pnnx_7289 0 1 22320 value=2 torch.flatten torch.flatten_2189 3 1 8136 8138 8137 8140 $input=8136 $start_dim=8138 $end_dim=8137 #8136=(1,192,48,48)f32 #8140=(1,192,2304)f32 torch.transpose torch.transpose_3044 3 1 8140 8139 22320 8141 $input=8140 $dim0=8139 $dim1=22320 #8140=(1,192,2304)f32 #8141=(1,2304,192)f32 aten::add pnnx_7291 3 1 8141 7160 7161 8142 #8141=(1,2304,192)f32 #7160=(1,2304,192)f32 #8142=(1,2304,192)f32 nn.LayerNorm norm_dfe 1 1 8142 832 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #8142=(1,2304,192)f32 #832=(1,2304,192)f32 prim::Constant pnnx_7292 0 1 8143 value=0 prim::Constant pnnx_7293 0 1 8144 value=1 prim::Constant pnnx_7294 0 1 8145 value=2 prim::Constant pnnx_7295 0 1 8146 value=192 aten::size pnnx_7296 2 1 832 8143 8147 #832=(1,2304,192)f32 prim::NumToTensor pnnx_7297 1 1 8147 B.89 aten::Int pnnx_7298 1 1 B.89 8149 prim::ListConstruct pnnx_7300 4 1 8149 8146 163 403 8151 torch.transpose torch.transpose_3045 3 1 832 8144 8145 8150 $input=832 $dim0=8144 $dim1=8145 #832=(1,2304,192)f32 #8150=(1,192,2304)f32 Tensor.view Tensor.view_1376 2 1 8150 8151 input.143 $input=8150 $shape=8151 #8150=(1,192,2304)f32 #input.143=(1,192,48,48)f32 nn.Conv2d conv_after_body_dfe 1 1 input.143 838 bias=True dilation=(1,1) groups=1 in_channels=192 kernel_size=(3,3) out_channels=192 padding=(1,1) padding_mode=zeros stride=(1,1) @bias=(192)f32 @weight=(192,192,3,3)f32 #input.143=(1,192,48,48)f32 #838=(1,192,48,48)f32 prim::Constant pnnx_7302 0 1 22321 value=1 aten::add pnnx_7303 3 1 838 154 22321 x_a.1 #838=(1,192,48,48)f32 #154=(1,192,48,48)f32 #x_a.1=(1,192,48,48)f32 prim::Constant pnnx_7304 0 1 22322 value=2 aten::size pnnx_7305 2 1 157 22322 844 #157=(1,192,48,48)f32 prim::NumToTensor pnnx_7306 1 1 844 H0.1 aten::Int pnnx_7307 1 1 H0.1 848 aten::Int pnnx_7308 1 1 H0.1 851 aten::Int pnnx_7309 1 1 H0.1 854 aten::Int pnnx_7310 1 1 H0.1 857 aten::Int pnnx_7311 1 1 H0.1 860 aten::Int pnnx_7312 1 1 H0.1 863 aten::Int pnnx_7313 1 1 H0.1 866 aten::Int pnnx_7314 1 1 H0.1 869 aten::Int pnnx_7315 1 1 H0.1 872 aten::Int pnnx_7316 1 1 H0.1 875 aten::Int pnnx_7317 1 1 H0.1 878 aten::Int pnnx_7318 1 1 H0.1 881 aten::Int pnnx_7319 1 1 H0.1 884 aten::Int pnnx_7320 1 1 H0.1 887 aten::Int pnnx_7321 1 1 H0.1 890 aten::Int pnnx_7322 1 1 H0.1 893 aten::Int pnnx_7323 1 1 H0.1 896 aten::Int pnnx_7324 1 1 H0.1 899 aten::Int pnnx_7325 1 1 H0.1 902 aten::Int pnnx_7326 1 1 H0.1 905 aten::Int pnnx_7327 1 1 H0.1 908 aten::Int pnnx_7328 1 1 H0.1 911 aten::Int pnnx_7329 1 1 H0.1 914 aten::Int pnnx_7330 1 1 H0.1 917 aten::Int pnnx_7331 1 1 H0.1 920 aten::Int pnnx_7332 1 1 H0.1 923 aten::Int pnnx_7333 1 1 H0.1 926 aten::Int pnnx_7334 1 1 H0.1 929 aten::Int pnnx_7335 1 1 H0.1 932 aten::Int pnnx_7336 1 1 
H0.1 935 aten::Int pnnx_7337 1 1 H0.1 938 aten::Int pnnx_7338 1 1 H0.1 941 aten::Int pnnx_7339 1 1 H0.1 944 aten::Int pnnx_7340 1 1 H0.1 947 aten::Int pnnx_7341 1 1 H0.1 950 aten::Int pnnx_7342 1 1 H0.1 953 aten::Int pnnx_7343 1 1 H0.1 956 aten::Int pnnx_7344 1 1 H0.1 959 aten::Int pnnx_7345 1 1 H0.1 962 aten::Int pnnx_7346 1 1 H0.1 965 aten::Int pnnx_7347 1 1 H0.1 968 aten::Int pnnx_7348 1 1 H0.1 971 aten::Int pnnx_7349 1 1 H0.1 974 aten::Int pnnx_7350 1 1 H0.1 977 aten::Int pnnx_7351 1 1 H0.1 980 aten::Int pnnx_7352 1 1 H0.1 983 aten::Int pnnx_7353 1 1 H0.1 986 aten::Int pnnx_7354 1 1 H0.1 989 aten::Int pnnx_7355 1 1 H0.1 992 aten::Int pnnx_7356 1 1 H0.1 995 aten::Int pnnx_7357 1 1 H0.1 998 aten::Int pnnx_7358 1 1 H0.1 1001 aten::Int pnnx_7359 1 1 H0.1 1004 aten::Int pnnx_7360 1 1 H0.1 1007 aten::Int pnnx_7361 1 1 H0.1 1010 aten::Int pnnx_7362 1 1 H0.1 1013 aten::Int pnnx_7363 1 1 H0.1 1016 aten::Int pnnx_7364 1 1 H0.1 1019 aten::Int pnnx_7365 1 1 H0.1 1022 aten::Int pnnx_7366 1 1 H0.1 1025 aten::Int pnnx_7367 1 1 H0.1 1028 aten::Int pnnx_7368 1 1 H0.1 1031 aten::Int pnnx_7369 1 1 H0.1 1034 aten::Int pnnx_7370 1 1 H0.1 1037 aten::Int pnnx_7371 1 1 H0.1 1040 aten::Int pnnx_7372 1 1 H0.1 1043 aten::Int pnnx_7373 1 1 H0.1 1046 aten::Int pnnx_7374 1 1 H0.1 1049 aten::Int pnnx_7375 1 1 H0.1 1052 aten::Int pnnx_7376 1 1 H0.1 1055 aten::Int pnnx_7377 1 1 H0.1 1058 aten::Int pnnx_7378 1 1 H0.1 1061 aten::Int pnnx_7379 1 1 H0.1 1064 aten::Int pnnx_7380 1 1 H0.1 1067 aten::Int pnnx_7381 1 1 H0.1 1070 aten::Int pnnx_7382 1 1 H0.1 1073 aten::Int pnnx_7383 1 1 H0.1 1076 aten::Int pnnx_7384 1 1 H0.1 1079 aten::Int pnnx_7385 1 1 H0.1 1082 prim::Constant pnnx_7386 0 1 22323 value=3 aten::size pnnx_7387 2 1 157 22323 1084 #157=(1,192,48,48)f32 prim::NumToTensor pnnx_7388 1 1 1084 W0.1 aten::Int pnnx_7389 1 1 W0.1 1088 aten::Int pnnx_7390 1 1 W0.1 1091 aten::Int pnnx_7391 1 1 W0.1 1094 aten::Int pnnx_7392 1 1 W0.1 1097 aten::Int pnnx_7393 1 1 W0.1 1100 aten::Int pnnx_7394 1 1 W0.1 1103 aten::Int pnnx_7395 1 1 W0.1 1106 aten::Int pnnx_7396 1 1 W0.1 1109 aten::Int pnnx_7397 1 1 W0.1 1112 aten::Int pnnx_7398 1 1 W0.1 1115 aten::Int pnnx_7399 1 1 W0.1 1118 aten::Int pnnx_7400 1 1 W0.1 1121 aten::Int pnnx_7401 1 1 W0.1 1124 aten::Int pnnx_7402 1 1 W0.1 1127 aten::Int pnnx_7403 1 1 W0.1 1130 aten::Int pnnx_7404 1 1 W0.1 1133 aten::Int pnnx_7405 1 1 W0.1 1136 aten::Int pnnx_7406 1 1 W0.1 1139 aten::Int pnnx_7407 1 1 W0.1 1142 aten::Int pnnx_7408 1 1 W0.1 1145 aten::Int pnnx_7409 1 1 W0.1 1148 aten::Int pnnx_7410 1 1 W0.1 1151 aten::Int pnnx_7411 1 1 W0.1 1154 aten::Int pnnx_7412 1 1 W0.1 1157 aten::Int pnnx_7413 1 1 W0.1 1160 aten::Int pnnx_7414 1 1 W0.1 1163 aten::Int pnnx_7415 1 1 W0.1 1166 aten::Int pnnx_7416 1 1 W0.1 1169 aten::Int pnnx_7417 1 1 W0.1 1172 aten::Int pnnx_7418 1 1 W0.1 1175 aten::Int pnnx_7419 1 1 W0.1 1178 aten::Int pnnx_7420 1 1 W0.1 1181 aten::Int pnnx_7421 1 1 W0.1 1184 aten::Int pnnx_7422 1 1 W0.1 1187 aten::Int pnnx_7423 1 1 W0.1 1190 aten::Int pnnx_7424 1 1 W0.1 1193 aten::Int pnnx_7425 1 1 W0.1 1196 aten::Int pnnx_7426 1 1 W0.1 1199 aten::Int pnnx_7427 1 1 W0.1 1202 aten::Int pnnx_7428 1 1 W0.1 1205 aten::Int pnnx_7429 1 1 W0.1 1208 aten::Int pnnx_7430 1 1 W0.1 1211 aten::Int pnnx_7431 1 1 W0.1 1214 aten::Int pnnx_7432 1 1 W0.1 1217 aten::Int pnnx_7433 1 1 W0.1 1220 aten::Int pnnx_7434 1 1 W0.1 1223 aten::Int pnnx_7435 1 1 W0.1 1226 aten::Int pnnx_7436 1 1 W0.1 1229 aten::Int pnnx_7437 1 1 W0.1 1232 aten::Int pnnx_7438 1 1 W0.1 1235 aten::Int pnnx_7439 1 1 W0.1 1238 aten::Int pnnx_7440 
1 1 W0.1 1241 aten::Int pnnx_7441 1 1 W0.1 1244 aten::Int pnnx_7442 1 1 W0.1 1247 aten::Int pnnx_7443 1 1 W0.1 1250 aten::Int pnnx_7444 1 1 W0.1 1253 aten::Int pnnx_7445 1 1 W0.1 1256 aten::Int pnnx_7446 1 1 W0.1 1259 aten::Int pnnx_7447 1 1 W0.1 1262 aten::Int pnnx_7448 1 1 W0.1 1265 aten::Int pnnx_7449 1 1 W0.1 1268 aten::Int pnnx_7450 1 1 W0.1 1271 aten::Int pnnx_7451 1 1 W0.1 1274 aten::Int pnnx_7452 1 1 W0.1 1277 aten::Int pnnx_7453 1 1 W0.1 1280 aten::Int pnnx_7454 1 1 W0.1 1283 aten::Int pnnx_7455 1 1 W0.1 1286 aten::Int pnnx_7456 1 1 W0.1 1289 aten::Int pnnx_7457 1 1 W0.1 1292 aten::Int pnnx_7458 1 1 W0.1 1295 aten::Int pnnx_7459 1 1 W0.1 1298 aten::Int pnnx_7460 1 1 W0.1 1301 aten::Int pnnx_7461 1 1 W0.1 1304 aten::Int pnnx_7462 1 1 W0.1 1307 aten::Int pnnx_7463 1 1 W0.1 1310 aten::Int pnnx_7464 1 1 W0.1 1313 aten::Int pnnx_7465 1 1 W0.1 1316 aten::Int pnnx_7466 1 1 W0.1 1319 aten::Int pnnx_7467 1 1 W0.1 1322 prim::Constant pnnx_7468 0 1 8153 value=-1 prim::Constant pnnx_7469 0 1 8154 value=2 prim::Constant pnnx_7470 0 1 8155 value=1 prim::Constant pnnx_7472 0 1 22324 value=2 torch.flatten torch.flatten_2190 3 1 157 8154 8153 8157 $input=157 $start_dim=8154 $end_dim=8153 #157=(1,192,48,48)f32 #8157=(1,192,2304)f32 torch.transpose torch.transpose_3046 3 1 8157 8155 22324 input.169 $input=8157 $dim0=8155 $dim1=22324 #8157=(1,192,2304)f32 #input.169=(1,2304,192)f32 nn.LayerNorm patch_embed_dfe.norm 1 1 input.169 8159 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.169=(1,2304,192)f32 #8159=(1,2304,192)f32 nn.Dropout pos_drop_dfe 1 1 8159 1327 #8159=(1,2304,192)f32 #1327=(1,2304,192)f32 prim::Constant pnnx_7474 0 1 8160 value=1 prim::Constant pnnx_7475 0 1 8177 value=trunc prim::Constant pnnx_7476 0 1 8178 value=8 prim::Constant pnnx_7477 0 1 8179 value=0 prim::Constant pnnx_7478 0 1 8180 value=2 prim::Constant pnnx_7479 0 1 8181 value=1 prim::Constant pnnx_7480 0 1 8182 value=3 prim::Constant pnnx_7481 0 1 8183 value=8 prim::Constant pnnx_7482 0 1 8184 value=4 prim::Constant pnnx_7483 0 1 8185 value=5 prim::Constant pnnx_7484 0 1 8186 value=-1 prim::Constant pnnx_7485 0 1 8187 value=64 aten::size pnnx_7486 2 1 1327 8179 8193 #1327=(1,2304,192)f32 prim::NumToTensor pnnx_7487 1 1 8193 B.91 aten::Int pnnx_7488 1 1 B.91 8195 aten::Int pnnx_7489 1 1 B.91 8196 aten::size pnnx_7490 2 1 1327 8180 8197 #1327=(1,2304,192)f32 prim::NumToTensor pnnx_7491 1 1 8197 C.155 aten::Int pnnx_7492 1 1 C.155 8199 aten::Int pnnx_7493 1 1 C.155 8200 aten::Int pnnx_7494 1 1 C.155 8201 aten::Int pnnx_7495 1 1 C.155 8202 nn.LayerNorm layers_dfe.0.residual_group.blocks.0.norm1 1 1 1327 8203 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #1327=(1,2304,192)f32 #8203=(1,2304,192)f32 prim::ListConstruct pnnx_7496 4 1 8196 1082 1322 8202 8204 prim::Constant pnnx_7498 0 1 22325 value=0 Tensor.view Tensor.view_1377 2 1 8203 8204 x.75 $input=8203 $shape=8204 #8203=(1,2304,192)f32 #x.75=(1,48,48,192)f32 aten::size pnnx_7499 2 1 x.75 22325 8206 #x.75=(1,48,48,192)f32 prim::NumToTensor pnnx_7500 1 1 8206 B1.3 aten::Int pnnx_7501 1 1 B1.3 8208 aten::size pnnx_7502 2 1 x.75 8181 8209 #x.75=(1,48,48,192)f32 prim::NumToTensor pnnx_7503 1 1 8209 8210 prim::Constant pnnx_7504 0 1 22326 value=2 aten::size pnnx_7505 2 1 x.75 22326 8211 #x.75=(1,48,48,192)f32 prim::NumToTensor pnnx_7506 1 1 8211 8212 aten::size pnnx_7507 2 1 x.75 8182 8213 #x.75=(1,48,48,192)f32 prim::NumToTensor pnnx_7508 1 1 8213 C1.3 aten::Int pnnx_7509 1 1 
C1.3 8215 aten::Int pnnx_7510 1 1 C1.3 8216 aten::div pnnx_7511 3 1 8210 8178 8177 8217 aten::Int pnnx_7512 1 1 8217 8218 prim::Constant pnnx_7513 0 1 22327 value=8 prim::Constant pnnx_7514 0 1 22328 value=trunc aten::div pnnx_7515 3 1 8212 22327 22328 8219 aten::Int pnnx_7516 1 1 8219 8220 prim::Constant pnnx_7517 0 1 22329 value=8 prim::ListConstruct pnnx_7518 6 1 8208 8218 8183 8220 22329 8216 8221 prim::Constant pnnx_7520 0 1 22330 value=0 prim::Constant pnnx_7521 0 1 22331 value=1 prim::Constant pnnx_7522 0 1 22332 value=3 prim::Constant pnnx_7523 0 1 22333 value=2 prim::ListConstruct pnnx_7524 6 1 22330 22331 22332 22333 8184 8185 8223 Tensor.view Tensor.view_1378 2 1 x.75 8221 x5.39 $input=x.75 $shape=8221 #x.75=(1,48,48,192)f32 #x5.39=(1,6,8,6,8,192)f32 prim::Constant pnnx_7528 0 1 22335 value=8 prim::Constant pnnx_7529 0 1 22336 value=8 prim::ListConstruct pnnx_7530 4 1 8186 22335 22336 8215 8226 torch.permute torch.permute_2672 2 1 x5.39 8223 8224 $input=x5.39 $dims=8223 #x5.39=(1,6,8,6,8,192)f32 #8224=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_110 1 1 8224 8225 memory_format=torch.contiguous_format $input=8224 #8224=(1,6,6,8,8,192)f32 #8225=(1,6,6,8,8,192)f32 prim::Constant pnnx_7532 0 1 22337 value=-1 prim::ListConstruct pnnx_7533 3 1 22337 8187 8201 8228 prim::Constant pnnx_7535 0 1 8230 value=1.767767e-01 prim::Constant pnnx_7536 0 1 8231 value=trunc prim::Constant pnnx_7537 0 1 8232 value=6 prim::Constant pnnx_7538 0 1 8233 value=0 prim::Constant pnnx_7539 0 1 8234 value=1 prim::Constant pnnx_7540 0 1 8235 value=2 prim::Constant pnnx_7541 0 1 8236 value=3 prim::Constant pnnx_7542 0 1 8237 value=6 prim::Constant pnnx_7543 0 1 8238 value=4 prim::Constant pnnx_7544 0 1 8239 value=-2 prim::Constant pnnx_7545 0 1 8240 value=-1 prim::Constant pnnx_7546 0 1 8241 value=64 pnnx.Attribute layers_dfe.0.residual_group.blocks.0.attn 0 1 relative_position_bias_table.75 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.75=(225,6)f32 pnnx.Attribute layers_dfe.0.residual_group.blocks.0.attn 0 1 relative_position_index.75 @relative_position_index=(64,64)i64 #relative_position_index.75=(64,64)i64 Tensor.view Tensor.view_1379 2 1 8225 8226 x_windows.75 $input=8225 $shape=8226 #8225=(1,6,6,8,8,192)f32 #x_windows.75=(36,8,8,192)f32 Tensor.view Tensor.view_1380 2 1 x_windows.75 8228 x6.3 $input=x_windows.75 $shape=8228 #x_windows.75=(36,8,8,192)f32 #x6.3=(36,64,192)f32 aten::size pnnx_7547 2 1 x6.3 8233 8249 #x6.3=(36,64,192)f32 prim::NumToTensor pnnx_7548 1 1 8249 B_.75 aten::Int pnnx_7549 1 1 B_.75 8251 aten::Int pnnx_7550 1 1 B_.75 8252 aten::size pnnx_7551 2 1 x6.3 8234 8253 #x6.3=(36,64,192)f32 prim::NumToTensor pnnx_7552 1 1 8253 N.75 aten::Int pnnx_7553 1 1 N.75 8255 aten::Int pnnx_7554 1 1 N.75 8256 aten::size pnnx_7555 2 1 x6.3 8235 8257 #x6.3=(36,64,192)f32 prim::NumToTensor pnnx_7556 1 1 8257 C.157 aten::Int pnnx_7557 1 1 C.157 8259 nn.Linear layers_dfe.0.residual_group.blocks.0.attn.qkv 1 1 x6.3 8260 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x6.3=(36,64,192)f32 #8260=(36,64,576)f32 aten::div pnnx_7558 3 1 C.157 8232 8231 8261 aten::Int pnnx_7559 1 1 8261 8262 prim::ListConstruct pnnx_7560 5 1 8252 8256 8236 8237 8262 8263 prim::Constant pnnx_7562 0 1 22338 value=2 prim::Constant pnnx_7563 0 1 22339 value=0 prim::Constant pnnx_7564 0 1 22340 value=3 prim::Constant pnnx_7565 0 1 22341 value=1 prim::ListConstruct pnnx_7566 5 1 22338 22339 22340 22341 8238 8265 Tensor.reshape Tensor.reshape_506 2 1 8260 8263 8264 
$input=8260 $shape=8263 #8260=(36,64,576)f32 #8264=(36,64,3,6,32)f32 prim::Constant pnnx_7568 0 1 22342 value=0 prim::Constant pnnx_7569 0 1 22343 value=0 prim::Constant pnnx_7571 0 1 22344 value=0 prim::Constant pnnx_7572 0 1 22345 value=1 prim::Constant pnnx_7574 0 1 22346 value=0 prim::Constant pnnx_7575 0 1 22347 value=2 torch.permute torch.permute_2673 2 1 8264 8265 qkv1.3 $input=8264 $dims=8265 #8264=(36,64,3,6,32)f32 #qkv1.3=(3,36,6,64,32)f32 Tensor.select Tensor.select_758 3 1 qkv1.3 22342 22343 q.75 $input=qkv1.3 $dim=22342 $index=22343 #qkv1.3=(3,36,6,64,32)f32 #q.75=(36,6,64,32)f32 aten::mul pnnx_7577 2 1 q.75 8230 q1.3 #q.75=(36,6,64,32)f32 #q1.3=(36,6,64,32)f32 Tensor.select Tensor.select_759 3 1 qkv1.3 22344 22345 k.75 $input=qkv1.3 $dim=22344 $index=22345 #qkv1.3=(3,36,6,64,32)f32 #k.75=(36,6,64,32)f32 prim::Constant pnnx_7580 0 1 22348 value=-1 prim::ListConstruct pnnx_7581 1 1 22348 8273 Tensor.view Tensor.view_1381 2 1 relative_position_index.75 8273 8274 $input=relative_position_index.75 $shape=8273 #relative_position_index.75=(64,64)i64 #8274=(4096)i64 prim::ListConstruct pnnx_7583 1 1 8274 8275 #8274=(4096)i64 prim::Constant pnnx_7585 0 1 22349 value=64 prim::Constant pnnx_7586 0 1 22350 value=-1 prim::ListConstruct pnnx_7587 3 1 8241 22349 22350 8277 Tensor.index Tensor.index_362 2 1 relative_position_bias_table.75 8275 8276 $input=relative_position_bias_table.75 $expr=8275 #relative_position_bias_table.75=(225,6)f32 #8276=(4096,6)f32 prim::Constant pnnx_7589 0 1 22351 value=2 prim::Constant pnnx_7590 0 1 22352 value=0 prim::Constant pnnx_7591 0 1 22353 value=1 prim::ListConstruct pnnx_7592 3 1 22351 22352 22353 8279 Tensor.view Tensor.view_1382 2 1 8276 8277 relative_position_bias.75 $input=8276 $shape=8277 #8276=(4096,6)f32 #relative_position_bias.75=(64,64,6)f32 prim::Constant pnnx_7596 0 1 22355 value=0 torch.permute torch.permute_2674 2 1 relative_position_bias.75 8279 8280 $input=relative_position_bias.75 $dims=8279 #relative_position_bias.75=(64,64,6)f32 #8280=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_111 1 1 8280 relative_position_bias1.3 memory_format=torch.contiguous_format $input=8280 #8280=(6,64,64)f32 #relative_position_bias1.3=(6,64,64)f32 prim::Constant pnnx_7598 0 1 22356 value=1 torch.transpose torch.transpose_3047 3 1 k.75 8239 8240 8271 $input=k.75 $dim0=8239 $dim1=8240 #k.75=(36,6,64,32)f32 #8271=(36,6,32,64)f32 torch.matmul torch.matmul_2276 2 1 q1.3 8271 attn.151 $input=q1.3 $other=8271 #q1.3=(36,6,64,32)f32 #8271=(36,6,32,64)f32 #attn.151=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3290 2 1 relative_position_bias1.3 22355 8282 $input=relative_position_bias1.3 $dim=22355 #relative_position_bias1.3=(6,64,64)f32 #8282=(1,6,64,64)f32 aten::add pnnx_7599 3 1 attn.151 8282 22356 input.171 #attn.151=(36,6,64,64)f32 #8282=(1,6,64,64)f32 #input.171=(36,6,64,64)f32 nn.Softmax layers_dfe.0.residual_group.blocks.0.attn.softmax 1 1 input.171 8284 dim=-1 #input.171=(36,6,64,64)f32 #8284=(36,6,64,64)f32 nn.Dropout layers_dfe.0.residual_group.blocks.0.attn.attn_drop 1 1 8284 8285 #8284=(36,6,64,64)f32 #8285=(36,6,64,64)f32 Tensor.select Tensor.select_760 3 1 qkv1.3 22346 22347 v.75 $input=qkv1.3 $dim=22346 $index=22347 #qkv1.3=(3,36,6,64,32)f32 #v.75=(36,6,64,32)f32 prim::Constant pnnx_7601 0 1 22357 value=1 prim::Constant pnnx_7602 0 1 22358 value=2 torch.matmul torch.matmul_2277 2 1 8285 v.75 8286 $input=8285 $other=v.75 #8285=(36,6,64,64)f32 #v.75=(36,6,64,32)f32 #8286=(36,6,64,32)f32 prim::ListConstruct pnnx_7604 3 1 8251 8255 8259 8288 
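The operators traced in this stretch — the qkv Linear producing (36,64,576), the reshape to (36,64,3,6,32) and permute to (3,36,6,64,32), the select of q/k/v, the 1.767767e-01 scaling (1/sqrt(32)), the q·kᵀ matmul, the relative_position_bias_table lookup, softmax, dropout and the final matmul with v — follow the standard Swin-style window multi-head self-attention. Below is a minimal PyTorch sketch of that computation, not the author's code: the class and parameter names are hypothetical, the relative_position_index buffer is left as a zero placeholder (its usual construction is sketched further below), and the attn_drop/proj_drop dropouts seen in the trace are omitted for brevity.

```python
import torch
import torch.nn as nn

# Minimal sketch of the window attention traced above (Swin-style).
# Shapes follow the dump: 8x8 windows -> N = 64 tokens, dim = 192,
# 6 heads -> head_dim = 32, scale = 32 ** -0.5 = 0.1767767.
class WindowAttentionSketch(nn.Module):
    def __init__(self, dim=192, window_size=8, num_heads=6):
        super().__init__()
        self.num_heads = num_heads
        head_dim = dim // num_heads
        self.scale = head_dim ** -0.5
        self.qkv = nn.Linear(dim, dim * 3, bias=True)       # (36,64,192) -> (36,64,576)
        self.proj = nn.Linear(dim, dim, bias=True)
        self.softmax = nn.Softmax(dim=-1)
        # (2*ws-1)^2 x heads = 225 x 6, matching relative_position_bias_table
        self.relative_position_bias_table = nn.Parameter(
            torch.zeros((2 * window_size - 1) ** 2, num_heads))
        # zero placeholder; the dump stores the real (64,64) int64 index as an attribute
        self.register_buffer(
            "relative_position_index",
            torch.zeros(window_size * window_size, window_size * window_size,
                        dtype=torch.long))

    def forward(self, x, mask=None):                         # x: (num_windows*B, N, C)
        B_, N, C = x.shape
        qkv = (self.qkv(x)
               .reshape(B_, N, 3, self.num_heads, C // self.num_heads)
               .permute(2, 0, 3, 1, 4))                      # (3, B_, heads, N, head_dim)
        q, k, v = qkv[0], qkv[1], qkv[2]
        attn = (q * self.scale) @ k.transpose(-2, -1)        # (B_, heads, N, N)
        bias = self.relative_position_bias_table[
            self.relative_position_index.view(-1)].view(N, N, -1)
        attn = attn + bias.permute(2, 0, 1).contiguous().unsqueeze(0)
        if mask is not None:                                 # shifted-window case, see below
            nW = mask.shape[0]
            attn = (attn.view(B_ // nW, nW, self.num_heads, N, N)
                    + mask.unsqueeze(1).unsqueeze(0))
            attn = attn.view(-1, self.num_heads, N, N)
        x = (self.softmax(attn) @ v).transpose(1, 2).reshape(B_, N, C)
        return self.proj(x)
```

With the traced shapes, `WindowAttentionSketch()(torch.randn(36, 64, 192))` returns a (36, 64, 192) tensor, matching input1.5 → proj output above.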
torch.transpose torch.transpose_3048 3 1 8286 22357 22358 8287 $input=8286 $dim0=22357 $dim1=22358 #8286=(36,6,64,32)f32 #8287=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_507 2 1 8287 8288 input1.5 $input=8287 $shape=8288 #8287=(36,64,6,32)f32 #input1.5=(36,64,192)f32 nn.Linear layers_dfe.0.residual_group.blocks.0.attn.proj 1 1 input1.5 8290 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.5=(36,64,192)f32 #8290=(36,64,192)f32 nn.Dropout layers_dfe.0.residual_group.blocks.0.attn.proj_drop 1 1 8290 8291 #8290=(36,64,192)f32 #8291=(36,64,192)f32 prim::Constant pnnx_7606 0 1 22359 value=-1 prim::Constant pnnx_7607 0 1 22360 value=8 prim::Constant pnnx_7608 0 1 22361 value=8 prim::ListConstruct pnnx_7609 4 1 22359 22360 22361 8200 8292 prim::Constant pnnx_7611 0 1 22362 value=8 prim::Constant pnnx_7612 0 1 22363 value=trunc aten::div pnnx_7613 3 1 H0.1 22362 22363 8294 aten::Int pnnx_7614 1 1 8294 8295 prim::Constant pnnx_7615 0 1 22364 value=8 prim::Constant pnnx_7616 0 1 22365 value=trunc aten::div pnnx_7617 3 1 W0.1 22364 22365 8296 aten::Int pnnx_7618 1 1 8296 8297 prim::Constant pnnx_7619 0 1 22366 value=1 prim::Constant pnnx_7620 0 1 22367 value=8 prim::Constant pnnx_7621 0 1 22368 value=8 prim::Constant pnnx_7622 0 1 22369 value=-1 prim::ListConstruct pnnx_7623 6 1 22366 8295 8297 22367 22368 22369 8298 prim::Constant pnnx_7625 0 1 22370 value=0 prim::Constant pnnx_7626 0 1 22371 value=1 prim::Constant pnnx_7627 0 1 22372 value=3 prim::Constant pnnx_7628 0 1 22373 value=2 prim::Constant pnnx_7629 0 1 22374 value=4 prim::Constant pnnx_7630 0 1 22375 value=5 prim::ListConstruct pnnx_7631 6 1 22370 22371 22372 22373 22374 22375 8300 Tensor.view Tensor.view_1383 2 1 8291 8292 windows.75 $input=8291 $shape=8292 #8291=(36,64,192)f32 #windows.75=(36,8,8,192)f32 Tensor.view Tensor.view_1384 2 1 windows.75 8298 x7.3 $input=windows.75 $shape=8298 #windows.75=(36,8,8,192)f32 #x7.3=(1,6,6,8,8,192)f32 prim::Constant pnnx_7635 0 1 22377 value=1 prim::Constant pnnx_7636 0 1 22378 value=-1 prim::ListConstruct pnnx_7637 4 1 22377 1079 1319 22378 8303 torch.permute torch.permute_2675 2 1 x7.3 8300 8301 $input=x7.3 $dims=8300 #x7.3=(1,6,6,8,8,192)f32 #8301=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_112 1 1 8301 8302 memory_format=torch.contiguous_format $input=8301 #8301=(1,6,8,6,8,192)f32 #8302=(1,6,8,6,8,192)f32 aten::mul pnnx_7639 2 1 H0.1 W0.1 8305 aten::Int pnnx_7640 1 1 8305 8306 prim::ListConstruct pnnx_7641 3 1 8195 8306 8199 8307 prim::Constant pnnx_7643 0 1 8309 value=None prim::Constant pnnx_7644 0 1 22379 value=1 Tensor.view Tensor.view_1385 2 1 8302 8303 x8.3 $input=8302 $shape=8303 #8302=(1,6,8,6,8,192)f32 #x8.3=(1,48,48,192)f32 Tensor.view Tensor.view_1386 2 1 x8.3 8307 x9.3 $input=x8.3 $shape=8307 #x8.3=(1,48,48,192)f32 #x9.3=(1,2304,192)f32 aten::add pnnx_7645 3 1 1327 x9.3 22379 input.173 #1327=(1,2304,192)f32 #x9.3=(1,2304,192)f32 #input.173=(1,2304,192)f32 nn.LayerNorm layers_dfe.0.residual_group.blocks.0.norm2 1 1 input.173 8311 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.173=(1,2304,192)f32 #8311=(1,2304,192)f32 nn.Linear layers_dfe.0.residual_group.blocks.0.mlp.fc1 1 1 8311 8316 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #8311=(1,2304,192)f32 #8316=(1,2304,384)f32 nn.GELU layers_dfe.0.residual_group.blocks.0.mlp.act 1 1 8316 8317 #8316=(1,2304,384)f32 #8317=(1,2304,384)f32 nn.Dropout layers_dfe.0.residual_group.blocks.0.mlp.drop 1 1 8317 
8318 #8317=(1,2304,384)f32 #8318=(1,2304,384)f32 nn.Linear layers_dfe.0.residual_group.blocks.0.mlp.fc2 1 1 8318 8319 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #8318=(1,2304,384)f32 #8319=(1,2304,192)f32 nn.Dropout layers_dfe.0.residual_group.blocks.0.mlp.drop 1 1 8319 8320 #8319=(1,2304,192)f32 #8320=(1,2304,192)f32 prim::Constant pnnx_7646 0 1 8321 value=None prim::Constant pnnx_7647 0 1 22380 value=1 aten::add pnnx_7648 3 1 input.173 8320 22380 8322 #input.173=(1,2304,192)f32 #8320=(1,2304,192)f32 #8322=(1,2304,192)f32 prim::Constant pnnx_7649 0 1 8323 value=trunc prim::Constant pnnx_7650 0 1 8324 value=8 prim::Constant pnnx_7651 0 1 8325 value=0 prim::Constant pnnx_7652 0 1 8326 value=2 prim::Constant pnnx_7653 0 1 8327 value=-4 prim::Constant pnnx_7654 0 1 8328 value=1 prim::Constant pnnx_7655 0 1 8329 value=3 prim::Constant pnnx_7656 0 1 8330 value=8 prim::Constant pnnx_7657 0 1 8331 value=4 prim::Constant pnnx_7658 0 1 8332 value=5 prim::Constant pnnx_7659 0 1 8333 value=-1 prim::Constant pnnx_7660 0 1 8334 value=64 pnnx.Attribute layers_dfe.0.residual_group.blocks.1 0 1 attn_mask.39 @attn_mask=(36,64,64)f32 #attn_mask.39=(36,64,64)f32 aten::size pnnx_7661 2 1 8322 8325 8341 #8322=(1,2304,192)f32 prim::NumToTensor pnnx_7662 1 1 8341 B.93 aten::Int pnnx_7663 1 1 B.93 8343 aten::Int pnnx_7664 1 1 B.93 8344 aten::size pnnx_7665 2 1 8322 8326 8345 #8322=(1,2304,192)f32 prim::NumToTensor pnnx_7666 1 1 8345 C.159 aten::Int pnnx_7667 1 1 C.159 8347 aten::Int pnnx_7668 1 1 C.159 8348 aten::Int pnnx_7669 1 1 C.159 8349 aten::Int pnnx_7670 1 1 C.159 8350 nn.LayerNorm layers_dfe.0.residual_group.blocks.1.norm1 1 1 8322 8351 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #8322=(1,2304,192)f32 #8351=(1,2304,192)f32 prim::ListConstruct pnnx_7671 4 1 8344 1076 1316 8350 8352 prim::Constant pnnx_7673 0 1 22381 value=-4 prim::ListConstruct pnnx_7674 2 1 8327 22381 8354 prim::Constant pnnx_7675 0 1 22382 value=2 prim::ListConstruct pnnx_7676 2 1 8328 22382 8355 Tensor.view Tensor.view_1387 2 1 8351 8352 x.77 $input=8351 $shape=8352 #8351=(1,2304,192)f32 #x.77=(1,48,48,192)f32 prim::Constant pnnx_7678 0 1 22383 value=0 torch.roll torch.roll_2456 3 1 x.77 8354 8355 x6.5 $input=x.77 $shifts=8354 $dims=8355 #x.77=(1,48,48,192)f32 #x6.5=(1,48,48,192)f32 aten::size pnnx_7679 2 1 x6.5 22383 8357 #x6.5=(1,48,48,192)f32 prim::NumToTensor pnnx_7680 1 1 8357 B1.5 aten::Int pnnx_7681 1 1 B1.5 8359 prim::Constant pnnx_7682 0 1 22384 value=1 aten::size pnnx_7683 2 1 x6.5 22384 8360 #x6.5=(1,48,48,192)f32 prim::NumToTensor pnnx_7684 1 1 8360 8361 prim::Constant pnnx_7685 0 1 22385 value=2 aten::size pnnx_7686 2 1 x6.5 22385 8362 #x6.5=(1,48,48,192)f32 prim::NumToTensor pnnx_7687 1 1 8362 8363 aten::size pnnx_7688 2 1 x6.5 8329 8364 #x6.5=(1,48,48,192)f32 prim::NumToTensor pnnx_7689 1 1 8364 C1.5 aten::Int pnnx_7690 1 1 C1.5 8366 aten::Int pnnx_7691 1 1 C1.5 8367 aten::div pnnx_7692 3 1 8361 8324 8323 8368 aten::Int pnnx_7693 1 1 8368 8369 prim::Constant pnnx_7694 0 1 22386 value=8 prim::Constant pnnx_7695 0 1 22387 value=trunc aten::div pnnx_7696 3 1 8363 22386 22387 8370 aten::Int pnnx_7697 1 1 8370 8371 prim::Constant pnnx_7698 0 1 22388 value=8 prim::ListConstruct pnnx_7699 6 1 8359 8369 8330 8371 22388 8367 8372 prim::Constant pnnx_7701 0 1 22389 value=0 prim::Constant pnnx_7702 0 1 22390 value=1 prim::Constant pnnx_7703 0 1 22391 value=3 prim::Constant pnnx_7704 0 1 22392 value=2 prim::ListConstruct pnnx_7705 6 1 22389 22390 
22391 22392 8331 8332 8374 Tensor.view Tensor.view_1388 2 1 x6.5 8372 x7.5 $input=x6.5 $shape=8372 #x6.5=(1,48,48,192)f32 #x7.5=(1,6,8,6,8,192)f32 prim::Constant pnnx_7709 0 1 22394 value=8 prim::Constant pnnx_7710 0 1 22395 value=8 prim::ListConstruct pnnx_7711 4 1 8333 22394 22395 8366 8377 torch.permute torch.permute_2676 2 1 x7.5 8374 8375 $input=x7.5 $dims=8374 #x7.5=(1,6,8,6,8,192)f32 #8375=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_113 1 1 8375 8376 memory_format=torch.contiguous_format $input=8375 #8375=(1,6,6,8,8,192)f32 #8376=(1,6,6,8,8,192)f32 prim::Constant pnnx_7713 0 1 22396 value=-1 prim::ListConstruct pnnx_7714 3 1 22396 8334 8349 8379 prim::Constant pnnx_7716 0 1 8381 value=1.767767e-01 prim::Constant pnnx_7717 0 1 8382 value=trunc prim::Constant pnnx_7718 0 1 8383 value=6 prim::Constant pnnx_7719 0 1 8384 value=0 prim::Constant pnnx_7720 0 1 8385 value=1 prim::Constant pnnx_7721 0 1 8386 value=2 prim::Constant pnnx_7722 0 1 8387 value=3 prim::Constant pnnx_7723 0 1 8388 value=6 prim::Constant pnnx_7724 0 1 8389 value=4 prim::Constant pnnx_7725 0 1 8390 value=-2 prim::Constant pnnx_7726 0 1 8391 value=-1 prim::Constant pnnx_7727 0 1 8392 value=64 pnnx.Attribute layers_dfe.0.residual_group.blocks.1.attn 0 1 relative_position_bias_table.77 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.77=(225,6)f32 pnnx.Attribute layers_dfe.0.residual_group.blocks.1.attn 0 1 relative_position_index.77 @relative_position_index=(64,64)i64 #relative_position_index.77=(64,64)i64 Tensor.view Tensor.view_1389 2 1 8376 8377 x_windows.77 $input=8376 $shape=8377 #8376=(1,6,6,8,8,192)f32 #x_windows.77=(36,8,8,192)f32 Tensor.view Tensor.view_1390 2 1 x_windows.77 8379 x8.5 $input=x_windows.77 $shape=8379 #x_windows.77=(36,8,8,192)f32 #x8.5=(36,64,192)f32 aten::size pnnx_7728 2 1 x8.5 8384 8400 #x8.5=(36,64,192)f32 prim::NumToTensor pnnx_7729 1 1 8400 B_.77 aten::Int pnnx_7730 1 1 B_.77 8402 aten::Int pnnx_7731 1 1 B_.77 8403 aten::size pnnx_7732 2 1 x8.5 8385 8404 #x8.5=(36,64,192)f32 prim::NumToTensor pnnx_7733 1 1 8404 N.77 aten::Int pnnx_7734 1 1 N.77 8406 aten::Int pnnx_7735 1 1 N.77 8407 aten::Int pnnx_7736 1 1 N.77 8408 aten::Int pnnx_7737 1 1 N.77 8409 aten::Int pnnx_7738 1 1 N.77 8410 aten::Int pnnx_7739 1 1 N.77 8411 aten::size pnnx_7740 2 1 x8.5 8386 8412 #x8.5=(36,64,192)f32 prim::NumToTensor pnnx_7741 1 1 8412 C.161 aten::Int pnnx_7742 1 1 C.161 8414 nn.Linear layers_dfe.0.residual_group.blocks.1.attn.qkv 1 1 x8.5 8415 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x8.5=(36,64,192)f32 #8415=(36,64,576)f32 aten::div pnnx_7743 3 1 C.161 8383 8382 8416 aten::Int pnnx_7744 1 1 8416 8417 prim::ListConstruct pnnx_7745 5 1 8403 8411 8387 8388 8417 8418 prim::Constant pnnx_7747 0 1 22397 value=2 prim::Constant pnnx_7748 0 1 22398 value=0 prim::Constant pnnx_7749 0 1 22399 value=3 prim::Constant pnnx_7750 0 1 22400 value=1 prim::ListConstruct pnnx_7751 5 1 22397 22398 22399 22400 8389 8420 Tensor.reshape Tensor.reshape_508 2 1 8415 8418 8419 $input=8415 $shape=8418 #8415=(36,64,576)f32 #8419=(36,64,3,6,32)f32 prim::Constant pnnx_7753 0 1 22401 value=0 prim::Constant pnnx_7754 0 1 22402 value=0 prim::Constant pnnx_7756 0 1 22403 value=0 prim::Constant pnnx_7757 0 1 22404 value=1 prim::Constant pnnx_7759 0 1 22405 value=0 prim::Constant pnnx_7760 0 1 22406 value=2 torch.permute torch.permute_2677 2 1 8419 8420 qkv1.5 $input=8419 $dims=8420 #8419=(36,64,3,6,32)f32 #qkv1.5=(3,36,6,64,32)f32 Tensor.select Tensor.select_761 3 1 qkv1.5 
22401 22402 q.77 $input=qkv1.5 $dim=22401 $index=22402 #qkv1.5=(3,36,6,64,32)f32 #q.77=(36,6,64,32)f32 aten::mul pnnx_7762 2 1 q.77 8381 q1.5 #q.77=(36,6,64,32)f32 #q1.5=(36,6,64,32)f32 Tensor.select Tensor.select_762 3 1 qkv1.5 22403 22404 k.77 $input=qkv1.5 $dim=22403 $index=22404 #qkv1.5=(3,36,6,64,32)f32 #k.77=(36,6,64,32)f32 prim::Constant pnnx_7765 0 1 22407 value=-1 prim::ListConstruct pnnx_7766 1 1 22407 8428 Tensor.view Tensor.view_1391 2 1 relative_position_index.77 8428 8429 $input=relative_position_index.77 $shape=8428 #relative_position_index.77=(64,64)i64 #8429=(4096)i64 prim::ListConstruct pnnx_7768 1 1 8429 8430 #8429=(4096)i64 prim::Constant pnnx_7770 0 1 22408 value=64 prim::Constant pnnx_7771 0 1 22409 value=-1 prim::ListConstruct pnnx_7772 3 1 8392 22408 22409 8432 Tensor.index Tensor.index_363 2 1 relative_position_bias_table.77 8430 8431 $input=relative_position_bias_table.77 $expr=8430 #relative_position_bias_table.77=(225,6)f32 #8431=(4096,6)f32 prim::Constant pnnx_7774 0 1 22410 value=2 prim::Constant pnnx_7775 0 1 22411 value=0 prim::Constant pnnx_7776 0 1 22412 value=1 prim::ListConstruct pnnx_7777 3 1 22410 22411 22412 8434 Tensor.view Tensor.view_1392 2 1 8431 8432 relative_position_bias.77 $input=8431 $shape=8432 #8431=(4096,6)f32 #relative_position_bias.77=(64,64,6)f32 prim::Constant pnnx_7781 0 1 22414 value=0 torch.permute torch.permute_2678 2 1 relative_position_bias.77 8434 8435 $input=relative_position_bias.77 $dims=8434 #relative_position_bias.77=(64,64,6)f32 #8435=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_114 1 1 8435 relative_position_bias1.5 memory_format=torch.contiguous_format $input=8435 #8435=(6,64,64)f32 #relative_position_bias1.5=(6,64,64)f32 prim::Constant pnnx_7783 0 1 22415 value=1 torch.transpose torch.transpose_3049 3 1 k.77 8390 8391 8426 $input=k.77 $dim0=8390 $dim1=8391 #k.77=(36,6,64,32)f32 #8426=(36,6,32,64)f32 torch.matmul torch.matmul_2278 2 1 q1.5 8426 attn.155 $input=q1.5 $other=8426 #q1.5=(36,6,64,32)f32 #8426=(36,6,32,64)f32 #attn.155=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3291 2 1 relative_position_bias1.5 22414 8437 $input=relative_position_bias1.5 $dim=22414 #relative_position_bias1.5=(6,64,64)f32 #8437=(1,6,64,64)f32 aten::add pnnx_7784 3 1 attn.155 8437 22415 attn2.3 #attn.155=(36,6,64,64)f32 #8437=(1,6,64,64)f32 #attn2.3=(36,6,64,64)f32 prim::Constant pnnx_7785 0 1 22416 value=0 aten::size pnnx_7786 2 1 attn_mask.39 22416 8439 #attn_mask.39=(36,64,64)f32 prim::NumToTensor pnnx_7787 1 1 8439 other.39 aten::Int pnnx_7788 1 1 other.39 8441 prim::Constant pnnx_7789 0 1 22417 value=trunc aten::div pnnx_7790 3 1 B_.77 other.39 22417 8442 aten::Int pnnx_7791 1 1 8442 8443 prim::Constant pnnx_7792 0 1 22418 value=6 prim::ListConstruct pnnx_7793 5 1 8443 8441 22418 8410 8409 8444 prim::Constant pnnx_7795 0 1 22419 value=1 prim::Constant pnnx_7797 0 1 22420 value=0 prim::Constant pnnx_7799 0 1 22421 value=1 Tensor.view Tensor.view_1393 2 1 attn2.3 8444 8445 $input=attn2.3 $shape=8444 #attn2.3=(36,6,64,64)f32 #8445=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3292 2 1 attn_mask.39 22419 8446 $input=attn_mask.39 $dim=22419 #attn_mask.39=(36,64,64)f32 #8446=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3293 2 1 8446 22420 8447 $input=8446 $dim=22420 #8446=(36,1,64,64)f32 #8447=(1,36,1,64,64)f32 aten::add pnnx_7800 3 1 8445 8447 22421 attn3.3 #8445=(1,36,6,64,64)f32 #8447=(1,36,1,64,64)f32 #attn3.3=(1,36,6,64,64)f32 prim::Constant pnnx_7801 0 1 22422 value=-1 prim::Constant pnnx_7802 0 1 22423 value=6 
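The (64,64) int64 relative_position_index attributes used here to index the (225,6) relative_position_bias_table are stored as precomputed buffers; the dump does not show how they were built. The sketch below shows the usual Swin-style construction for an 8×8 window, so it is a reconstruction under that assumption, with a hypothetical function name.

```python
import torch

# Sketch of how a relative_position_index=(64,64)i64 buffer is typically built
# for an 8x8 window; the dump only stores the finished attribute.
def build_relative_position_index(window_size=8):
    coords = torch.stack(torch.meshgrid(
        torch.arange(window_size), torch.arange(window_size), indexing="ij"))
    coords_flat = coords.flatten(1)                             # (2, 64)
    rel = coords_flat[:, :, None] - coords_flat[:, None, :]     # (2, 64, 64)
    rel = rel.permute(1, 2, 0).contiguous()                     # (64, 64, 2)
    rel[:, :, 0] += window_size - 1                             # shift values to start at 0
    rel[:, :, 1] += window_size - 1
    rel[:, :, 0] *= 2 * window_size - 1
    return rel.sum(-1)                                          # (64, 64), values in [0, 224]

idx = build_relative_position_index()
assert idx.shape == (64, 64) and idx.max().item() == 224        # 225 table rows, as in the dump
```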
prim::ListConstruct pnnx_7803 4 1 22422 22423 8408 8407 8449 Tensor.view Tensor.view_1394 2 1 attn3.3 8449 input.175 $input=attn3.3 $shape=8449 #attn3.3=(1,36,6,64,64)f32 #input.175=(36,6,64,64)f32 nn.Softmax layers_dfe.0.residual_group.blocks.1.attn.softmax 1 1 input.175 8451 dim=-1 #input.175=(36,6,64,64)f32 #8451=(36,6,64,64)f32 nn.Dropout layers_dfe.0.residual_group.blocks.1.attn.attn_drop 1 1 8451 8452 #8451=(36,6,64,64)f32 #8452=(36,6,64,64)f32 Tensor.select Tensor.select_763 3 1 qkv1.5 22405 22406 v.77 $input=qkv1.5 $dim=22405 $index=22406 #qkv1.5=(3,36,6,64,32)f32 #v.77=(36,6,64,32)f32 prim::Constant pnnx_7806 0 1 22424 value=1 prim::Constant pnnx_7807 0 1 22425 value=2 torch.matmul torch.matmul_2279 2 1 8452 v.77 8453 $input=8452 $other=v.77 #8452=(36,6,64,64)f32 #v.77=(36,6,64,32)f32 #8453=(36,6,64,32)f32 prim::ListConstruct pnnx_7809 3 1 8402 8406 8414 8455 torch.transpose torch.transpose_3050 3 1 8453 22424 22425 8454 $input=8453 $dim0=22424 $dim1=22425 #8453=(36,6,64,32)f32 #8454=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_509 2 1 8454 8455 input1.7 $input=8454 $shape=8455 #8454=(36,64,6,32)f32 #input1.7=(36,64,192)f32 nn.Linear layers_dfe.0.residual_group.blocks.1.attn.proj 1 1 input1.7 8457 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.7=(36,64,192)f32 #8457=(36,64,192)f32 nn.Dropout layers_dfe.0.residual_group.blocks.1.attn.proj_drop 1 1 8457 8458 #8457=(36,64,192)f32 #8458=(36,64,192)f32 prim::Constant pnnx_7811 0 1 22426 value=-1 prim::Constant pnnx_7812 0 1 22427 value=8 prim::Constant pnnx_7813 0 1 22428 value=8 prim::ListConstruct pnnx_7814 4 1 22426 22427 22428 8348 8459 prim::Constant pnnx_7816 0 1 22429 value=8 prim::Constant pnnx_7817 0 1 22430 value=trunc aten::div pnnx_7818 3 1 H0.1 22429 22430 8461 aten::Int pnnx_7819 1 1 8461 8462 prim::Constant pnnx_7820 0 1 22431 value=8 prim::Constant pnnx_7821 0 1 22432 value=trunc aten::div pnnx_7822 3 1 W0.1 22431 22432 8463 aten::Int pnnx_7823 1 1 8463 8464 prim::Constant pnnx_7824 0 1 22433 value=1 prim::Constant pnnx_7825 0 1 22434 value=8 prim::Constant pnnx_7826 0 1 22435 value=8 prim::Constant pnnx_7827 0 1 22436 value=-1 prim::ListConstruct pnnx_7828 6 1 22433 8462 8464 22434 22435 22436 8465 prim::Constant pnnx_7830 0 1 22437 value=0 prim::Constant pnnx_7831 0 1 22438 value=1 prim::Constant pnnx_7832 0 1 22439 value=3 prim::Constant pnnx_7833 0 1 22440 value=2 prim::Constant pnnx_7834 0 1 22441 value=4 prim::Constant pnnx_7835 0 1 22442 value=5 prim::ListConstruct pnnx_7836 6 1 22437 22438 22439 22440 22441 22442 8467 Tensor.view Tensor.view_1395 2 1 8458 8459 windows.77 $input=8458 $shape=8459 #8458=(36,64,192)f32 #windows.77=(36,8,8,192)f32 Tensor.view Tensor.view_1396 2 1 windows.77 8465 x9.5 $input=windows.77 $shape=8465 #windows.77=(36,8,8,192)f32 #x9.5=(1,6,6,8,8,192)f32 prim::Constant pnnx_7840 0 1 22444 value=1 prim::Constant pnnx_7841 0 1 22445 value=-1 prim::ListConstruct pnnx_7842 4 1 22444 1073 1313 22445 8470 torch.permute torch.permute_2679 2 1 x9.5 8467 8468 $input=x9.5 $dims=8467 #x9.5=(1,6,6,8,8,192)f32 #8468=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_115 1 1 8468 8469 memory_format=torch.contiguous_format $input=8468 #8468=(1,6,8,6,8,192)f32 #8469=(1,6,8,6,8,192)f32 prim::Constant pnnx_7844 0 1 22446 value=4 prim::Constant pnnx_7845 0 1 22447 value=4 prim::ListConstruct pnnx_7846 2 1 22446 22447 8472 prim::Constant pnnx_7847 0 1 22448 value=1 prim::Constant pnnx_7848 0 1 22449 value=2 prim::ListConstruct pnnx_7849 2 1 22448 22449 8473 
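The odd-indexed blocks (…blocks.1, …blocks.3) differ from the even ones only by the torch.roll by (-4,-4) over dims (1,2) before window partitioning, the addition of the precomputed attn_mask=(36,64,64) inside the attention, and the inverse roll by (4,4) that follows immediately below (the torch.roll_2456 / torch.roll_2457 pair for block 1). The sketch below shows how such a mask is typically precomputed for a 48×48 feature map with window size 8 and shift 4; the function name and the -100 fill value are assumptions, since the dump only stores the finished attn_mask attribute.

```python
import torch

# Sketch of the shifted-window attention mask for a 48x48 map, window 8, shift 4:
# 6x6 = 36 windows of 64 tokens each, matching attn_mask=(36,64,64) in the dump.
def build_attn_mask(H=48, W=48, window_size=8, shift_size=4):
    img_mask = torch.zeros(1, H, W, 1)
    slices = (slice(0, -window_size),
              slice(-window_size, -shift_size),
              slice(-shift_size, None))
    cnt = 0
    for h in slices:
        for w in slices:
            img_mask[:, h, w, :] = cnt                        # label the 9 shifted regions
            cnt += 1
    # window partition: (1,H,W,1) -> (num_windows, window_size*window_size)
    m = img_mask.view(1, H // window_size, window_size, W // window_size, window_size, 1)
    m = m.permute(0, 1, 3, 2, 4, 5).reshape(-1, window_size * window_size)
    attn_mask = m.unsqueeze(1) - m.unsqueeze(2)               # (36, 64, 64)
    return (attn_mask.masked_fill(attn_mask != 0, -100.0)
                     .masked_fill(attn_mask == 0, 0.0))

mask = build_attn_mask()
print(mask.shape)                                             # torch.Size([36, 64, 64])
```

In the trace itself, the mask is consumed exactly as in the masked branch of the attention sketch above: the (36,6,64,64) scores are viewed as (1,36,6,64,64), the mask is unsqueezed to (1,36,1,64,64), added, and the result is viewed back to (36,6,64,64) before the softmax.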
Tensor.view Tensor.view_1397 2 1 8469 8470 shifted_x.39 $input=8469 $shape=8470 #8469=(1,6,8,6,8,192)f32 #shifted_x.39=(1,48,48,192)f32 aten::mul pnnx_7851 2 1 H0.1 W0.1 8475 aten::Int pnnx_7852 1 1 8475 8476 prim::ListConstruct pnnx_7853 3 1 8343 8476 8347 8477 prim::Constant pnnx_7855 0 1 8479 value=None prim::Constant pnnx_7856 0 1 22450 value=1 torch.roll torch.roll_2457 3 1 shifted_x.39 8472 8473 x10.3 $input=shifted_x.39 $shifts=8472 $dims=8473 #shifted_x.39=(1,48,48,192)f32 #x10.3=(1,48,48,192)f32 Tensor.view Tensor.view_1398 2 1 x10.3 8477 x11.3 $input=x10.3 $shape=8477 #x10.3=(1,48,48,192)f32 #x11.3=(1,2304,192)f32 aten::add pnnx_7857 3 1 8322 x11.3 22450 input.177 #8322=(1,2304,192)f32 #x11.3=(1,2304,192)f32 #input.177=(1,2304,192)f32 nn.LayerNorm layers_dfe.0.residual_group.blocks.1.norm2 1 1 input.177 8481 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.177=(1,2304,192)f32 #8481=(1,2304,192)f32 nn.Linear layers_dfe.0.residual_group.blocks.1.mlp.fc1 1 1 8481 8486 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #8481=(1,2304,192)f32 #8486=(1,2304,384)f32 nn.GELU layers_dfe.0.residual_group.blocks.1.mlp.act 1 1 8486 8487 #8486=(1,2304,384)f32 #8487=(1,2304,384)f32 nn.Dropout layers_dfe.0.residual_group.blocks.1.mlp.drop 1 1 8487 8488 #8487=(1,2304,384)f32 #8488=(1,2304,384)f32 nn.Linear layers_dfe.0.residual_group.blocks.1.mlp.fc2 1 1 8488 8489 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #8488=(1,2304,384)f32 #8489=(1,2304,192)f32 nn.Dropout layers_dfe.0.residual_group.blocks.1.mlp.drop 1 1 8489 8490 #8489=(1,2304,192)f32 #8490=(1,2304,192)f32 prim::Constant pnnx_7858 0 1 8491 value=None prim::Constant pnnx_7859 0 1 22451 value=1 aten::add pnnx_7860 3 1 input.177 8490 22451 8492 #input.177=(1,2304,192)f32 #8490=(1,2304,192)f32 #8492=(1,2304,192)f32 prim::Constant pnnx_7861 0 1 8493 value=trunc prim::Constant pnnx_7862 0 1 8494 value=8 prim::Constant pnnx_7863 0 1 8495 value=0 prim::Constant pnnx_7864 0 1 8496 value=2 prim::Constant pnnx_7865 0 1 8497 value=1 prim::Constant pnnx_7866 0 1 8498 value=3 prim::Constant pnnx_7867 0 1 8499 value=8 prim::Constant pnnx_7868 0 1 8500 value=4 prim::Constant pnnx_7869 0 1 8501 value=5 prim::Constant pnnx_7870 0 1 8502 value=-1 prim::Constant pnnx_7871 0 1 8503 value=64 aten::size pnnx_7872 2 1 8492 8495 8509 #8492=(1,2304,192)f32 prim::NumToTensor pnnx_7873 1 1 8509 B.95 aten::Int pnnx_7874 1 1 B.95 8511 aten::Int pnnx_7875 1 1 B.95 8512 aten::size pnnx_7876 2 1 8492 8496 8513 #8492=(1,2304,192)f32 prim::NumToTensor pnnx_7877 1 1 8513 C.163 aten::Int pnnx_7878 1 1 C.163 8515 aten::Int pnnx_7879 1 1 C.163 8516 aten::Int pnnx_7880 1 1 C.163 8517 aten::Int pnnx_7881 1 1 C.163 8518 nn.LayerNorm layers_dfe.0.residual_group.blocks.2.norm1 1 1 8492 8519 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #8492=(1,2304,192)f32 #8519=(1,2304,192)f32 prim::ListConstruct pnnx_7882 4 1 8512 1070 1310 8518 8520 prim::Constant pnnx_7884 0 1 22452 value=0 Tensor.view Tensor.view_1399 2 1 8519 8520 x.79 $input=8519 $shape=8520 #8519=(1,2304,192)f32 #x.79=(1,48,48,192)f32 aten::size pnnx_7885 2 1 x.79 22452 8522 #x.79=(1,48,48,192)f32 prim::NumToTensor pnnx_7886 1 1 8522 B1.7 aten::Int pnnx_7887 1 1 B1.7 8524 aten::size pnnx_7888 2 1 x.79 8497 8525 #x.79=(1,48,48,192)f32 prim::NumToTensor pnnx_7889 1 1 8525 8526 prim::Constant pnnx_7890 0 1 22453 value=2 aten::size pnnx_7891 2 1 x.79 22453 8527 
#x.79=(1,48,48,192)f32 prim::NumToTensor pnnx_7892 1 1 8527 8528 aten::size pnnx_7893 2 1 x.79 8498 8529 #x.79=(1,48,48,192)f32 prim::NumToTensor pnnx_7894 1 1 8529 C1.7 aten::Int pnnx_7895 1 1 C1.7 8531 aten::Int pnnx_7896 1 1 C1.7 8532 aten::div pnnx_7897 3 1 8526 8494 8493 8533 aten::Int pnnx_7898 1 1 8533 8534 prim::Constant pnnx_7899 0 1 22454 value=8 prim::Constant pnnx_7900 0 1 22455 value=trunc aten::div pnnx_7901 3 1 8528 22454 22455 8535 aten::Int pnnx_7902 1 1 8535 8536 prim::Constant pnnx_7903 0 1 22456 value=8 prim::ListConstruct pnnx_7904 6 1 8524 8534 8499 8536 22456 8532 8537 prim::Constant pnnx_7906 0 1 22457 value=0 prim::Constant pnnx_7907 0 1 22458 value=1 prim::Constant pnnx_7908 0 1 22459 value=3 prim::Constant pnnx_7909 0 1 22460 value=2 prim::ListConstruct pnnx_7910 6 1 22457 22458 22459 22460 8500 8501 8539 Tensor.view Tensor.view_1400 2 1 x.79 8537 x5.41 $input=x.79 $shape=8537 #x.79=(1,48,48,192)f32 #x5.41=(1,6,8,6,8,192)f32 prim::Constant pnnx_7914 0 1 22462 value=8 prim::Constant pnnx_7915 0 1 22463 value=8 prim::ListConstruct pnnx_7916 4 1 8502 22462 22463 8531 8542 torch.permute torch.permute_2680 2 1 x5.41 8539 8540 $input=x5.41 $dims=8539 #x5.41=(1,6,8,6,8,192)f32 #8540=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_116 1 1 8540 8541 memory_format=torch.contiguous_format $input=8540 #8540=(1,6,6,8,8,192)f32 #8541=(1,6,6,8,8,192)f32 prim::Constant pnnx_7918 0 1 22464 value=-1 prim::ListConstruct pnnx_7919 3 1 22464 8503 8517 8544 prim::Constant pnnx_7921 0 1 8546 value=1.767767e-01 prim::Constant pnnx_7922 0 1 8547 value=trunc prim::Constant pnnx_7923 0 1 8548 value=6 prim::Constant pnnx_7924 0 1 8549 value=0 prim::Constant pnnx_7925 0 1 8550 value=1 prim::Constant pnnx_7926 0 1 8551 value=2 prim::Constant pnnx_7927 0 1 8552 value=3 prim::Constant pnnx_7928 0 1 8553 value=6 prim::Constant pnnx_7929 0 1 8554 value=4 prim::Constant pnnx_7930 0 1 8555 value=-2 prim::Constant pnnx_7931 0 1 8556 value=-1 prim::Constant pnnx_7932 0 1 8557 value=64 pnnx.Attribute layers_dfe.0.residual_group.blocks.2.attn 0 1 relative_position_bias_table.79 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.79=(225,6)f32 pnnx.Attribute layers_dfe.0.residual_group.blocks.2.attn 0 1 relative_position_index.79 @relative_position_index=(64,64)i64 #relative_position_index.79=(64,64)i64 Tensor.view Tensor.view_1401 2 1 8541 8542 x_windows.79 $input=8541 $shape=8542 #8541=(1,6,6,8,8,192)f32 #x_windows.79=(36,8,8,192)f32 Tensor.view Tensor.view_1402 2 1 x_windows.79 8544 x6.7 $input=x_windows.79 $shape=8544 #x_windows.79=(36,8,8,192)f32 #x6.7=(36,64,192)f32 aten::size pnnx_7933 2 1 x6.7 8549 8565 #x6.7=(36,64,192)f32 prim::NumToTensor pnnx_7934 1 1 8565 B_.79 aten::Int pnnx_7935 1 1 B_.79 8567 aten::Int pnnx_7936 1 1 B_.79 8568 aten::size pnnx_7937 2 1 x6.7 8550 8569 #x6.7=(36,64,192)f32 prim::NumToTensor pnnx_7938 1 1 8569 N.79 aten::Int pnnx_7939 1 1 N.79 8571 aten::Int pnnx_7940 1 1 N.79 8572 aten::size pnnx_7941 2 1 x6.7 8551 8573 #x6.7=(36,64,192)f32 prim::NumToTensor pnnx_7942 1 1 8573 C.165 aten::Int pnnx_7943 1 1 C.165 8575 nn.Linear layers_dfe.0.residual_group.blocks.2.attn.qkv 1 1 x6.7 8576 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x6.7=(36,64,192)f32 #8576=(36,64,576)f32 aten::div pnnx_7944 3 1 C.165 8548 8547 8577 aten::Int pnnx_7945 1 1 8577 8578 prim::ListConstruct pnnx_7946 5 1 8568 8572 8552 8553 8578 8579 prim::Constant pnnx_7948 0 1 22465 value=2 prim::Constant pnnx_7949 0 1 22466 value=0 prim::Constant 
pnnx_7950 0 1 22467 value=3 prim::Constant pnnx_7951 0 1 22468 value=1 prim::ListConstruct pnnx_7952 5 1 22465 22466 22467 22468 8554 8581 Tensor.reshape Tensor.reshape_510 2 1 8576 8579 8580 $input=8576 $shape=8579 #8576=(36,64,576)f32 #8580=(36,64,3,6,32)f32 prim::Constant pnnx_7954 0 1 22469 value=0 prim::Constant pnnx_7955 0 1 22470 value=0 prim::Constant pnnx_7957 0 1 22471 value=0 prim::Constant pnnx_7958 0 1 22472 value=1 prim::Constant pnnx_7960 0 1 22473 value=0 prim::Constant pnnx_7961 0 1 22474 value=2 torch.permute torch.permute_2681 2 1 8580 8581 qkv1.7 $input=8580 $dims=8581 #8580=(36,64,3,6,32)f32 #qkv1.7=(3,36,6,64,32)f32 Tensor.select Tensor.select_764 3 1 qkv1.7 22469 22470 q.79 $input=qkv1.7 $dim=22469 $index=22470 #qkv1.7=(3,36,6,64,32)f32 #q.79=(36,6,64,32)f32 aten::mul pnnx_7963 2 1 q.79 8546 q1.7 #q.79=(36,6,64,32)f32 #q1.7=(36,6,64,32)f32 Tensor.select Tensor.select_765 3 1 qkv1.7 22471 22472 k.79 $input=qkv1.7 $dim=22471 $index=22472 #qkv1.7=(3,36,6,64,32)f32 #k.79=(36,6,64,32)f32 prim::Constant pnnx_7966 0 1 22475 value=-1 prim::ListConstruct pnnx_7967 1 1 22475 8589 Tensor.view Tensor.view_1403 2 1 relative_position_index.79 8589 8590 $input=relative_position_index.79 $shape=8589 #relative_position_index.79=(64,64)i64 #8590=(4096)i64 prim::ListConstruct pnnx_7969 1 1 8590 8591 #8590=(4096)i64 prim::Constant pnnx_7971 0 1 22476 value=64 prim::Constant pnnx_7972 0 1 22477 value=-1 prim::ListConstruct pnnx_7973 3 1 8557 22476 22477 8593 Tensor.index Tensor.index_364 2 1 relative_position_bias_table.79 8591 8592 $input=relative_position_bias_table.79 $expr=8591 #relative_position_bias_table.79=(225,6)f32 #8592=(4096,6)f32 prim::Constant pnnx_7975 0 1 22478 value=2 prim::Constant pnnx_7976 0 1 22479 value=0 prim::Constant pnnx_7977 0 1 22480 value=1 prim::ListConstruct pnnx_7978 3 1 22478 22479 22480 8595 Tensor.view Tensor.view_1404 2 1 8592 8593 relative_position_bias.79 $input=8592 $shape=8593 #8592=(4096,6)f32 #relative_position_bias.79=(64,64,6)f32 prim::Constant pnnx_7982 0 1 22482 value=0 torch.permute torch.permute_2682 2 1 relative_position_bias.79 8595 8596 $input=relative_position_bias.79 $dims=8595 #relative_position_bias.79=(64,64,6)f32 #8596=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_117 1 1 8596 relative_position_bias1.7 memory_format=torch.contiguous_format $input=8596 #8596=(6,64,64)f32 #relative_position_bias1.7=(6,64,64)f32 prim::Constant pnnx_7984 0 1 22483 value=1 torch.transpose torch.transpose_3051 3 1 k.79 8555 8556 8587 $input=k.79 $dim0=8555 $dim1=8556 #k.79=(36,6,64,32)f32 #8587=(36,6,32,64)f32 torch.matmul torch.matmul_2280 2 1 q1.7 8587 attn.159 $input=q1.7 $other=8587 #q1.7=(36,6,64,32)f32 #8587=(36,6,32,64)f32 #attn.159=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3294 2 1 relative_position_bias1.7 22482 8598 $input=relative_position_bias1.7 $dim=22482 #relative_position_bias1.7=(6,64,64)f32 #8598=(1,6,64,64)f32 aten::add pnnx_7985 3 1 attn.159 8598 22483 input.179 #attn.159=(36,6,64,64)f32 #8598=(1,6,64,64)f32 #input.179=(36,6,64,64)f32 nn.Softmax layers_dfe.0.residual_group.blocks.2.attn.softmax 1 1 input.179 8600 dim=-1 #input.179=(36,6,64,64)f32 #8600=(36,6,64,64)f32 nn.Dropout layers_dfe.0.residual_group.blocks.2.attn.attn_drop 1 1 8600 8601 #8600=(36,6,64,64)f32 #8601=(36,6,64,64)f32 Tensor.select Tensor.select_766 3 1 qkv1.7 22473 22474 v.79 $input=qkv1.7 $dim=22473 $index=22474 #qkv1.7=(3,36,6,64,32)f32 #v.79=(36,6,64,32)f32 prim::Constant pnnx_7987 0 1 22484 value=1 prim::Constant pnnx_7988 0 1 22485 value=2 
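The recurring view → permute(0,1,3,2,4,5) → contiguous → view chains (e.g. Tensor.view_1400 / torch.permute_2680 / Tensor.view_1401 just above, and their inverses such as torch.permute_2683 below) implement the usual window partition and window reverse between the (1,48,48,192) feature map and the (36,8,8,192) window batch. A minimal sketch with hypothetical helper names, plus a round-trip check on the traced shapes:

```python
import torch

# Window partition / reverse as implemented by the repeated
# view -> permute(0,1,3,2,4,5) -> contiguous -> view chains in the trace.
def window_partition(x, window_size=8):
    B, H, W, C = x.shape
    x = x.view(B, H // window_size, window_size, W // window_size, window_size, C)
    return (x.permute(0, 1, 3, 2, 4, 5).contiguous()
             .view(-1, window_size, window_size, C))          # (num_windows*B, ws, ws, C)

def window_reverse(windows, window_size, H, W):
    B = windows.shape[0] // (H * W // window_size // window_size)
    x = windows.view(B, H // window_size, W // window_size, window_size, window_size, -1)
    return x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, H, W, -1)

# round trip on the traced shapes: (1,48,48,192) <-> (36,8,8,192)
x = torch.randn(1, 48, 48, 192)
w = window_partition(x)
assert w.shape == (36, 8, 8, 192)
assert torch.equal(window_reverse(w, 8, 48, 48), x)
```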
torch.matmul torch.matmul_2281 2 1 8601 v.79 8602 $input=8601 $other=v.79 #8601=(36,6,64,64)f32 #v.79=(36,6,64,32)f32 #8602=(36,6,64,32)f32 prim::ListConstruct pnnx_7990 3 1 8567 8571 8575 8604 torch.transpose torch.transpose_3052 3 1 8602 22484 22485 8603 $input=8602 $dim0=22484 $dim1=22485 #8602=(36,6,64,32)f32 #8603=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_511 2 1 8603 8604 input1.9 $input=8603 $shape=8604 #8603=(36,64,6,32)f32 #input1.9=(36,64,192)f32 nn.Linear layers_dfe.0.residual_group.blocks.2.attn.proj 1 1 input1.9 8606 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.9=(36,64,192)f32 #8606=(36,64,192)f32 nn.Dropout layers_dfe.0.residual_group.blocks.2.attn.proj_drop 1 1 8606 8607 #8606=(36,64,192)f32 #8607=(36,64,192)f32 prim::Constant pnnx_7992 0 1 22486 value=-1 prim::Constant pnnx_7993 0 1 22487 value=8 prim::Constant pnnx_7994 0 1 22488 value=8 prim::ListConstruct pnnx_7995 4 1 22486 22487 22488 8516 8608 prim::Constant pnnx_7997 0 1 22489 value=8 prim::Constant pnnx_7998 0 1 22490 value=trunc aten::div pnnx_7999 3 1 H0.1 22489 22490 8610 aten::Int pnnx_8000 1 1 8610 8611 prim::Constant pnnx_8001 0 1 22491 value=8 prim::Constant pnnx_8002 0 1 22492 value=trunc aten::div pnnx_8003 3 1 W0.1 22491 22492 8612 aten::Int pnnx_8004 1 1 8612 8613 prim::Constant pnnx_8005 0 1 22493 value=1 prim::Constant pnnx_8006 0 1 22494 value=8 prim::Constant pnnx_8007 0 1 22495 value=8 prim::Constant pnnx_8008 0 1 22496 value=-1 prim::ListConstruct pnnx_8009 6 1 22493 8611 8613 22494 22495 22496 8614 prim::Constant pnnx_8011 0 1 22497 value=0 prim::Constant pnnx_8012 0 1 22498 value=1 prim::Constant pnnx_8013 0 1 22499 value=3 prim::Constant pnnx_8014 0 1 22500 value=2 prim::Constant pnnx_8015 0 1 22501 value=4 prim::Constant pnnx_8016 0 1 22502 value=5 prim::ListConstruct pnnx_8017 6 1 22497 22498 22499 22500 22501 22502 8616 Tensor.view Tensor.view_1405 2 1 8607 8608 windows.79 $input=8607 $shape=8608 #8607=(36,64,192)f32 #windows.79=(36,8,8,192)f32 Tensor.view Tensor.view_1406 2 1 windows.79 8614 x7.7 $input=windows.79 $shape=8614 #windows.79=(36,8,8,192)f32 #x7.7=(1,6,6,8,8,192)f32 prim::Constant pnnx_8021 0 1 22504 value=1 prim::Constant pnnx_8022 0 1 22505 value=-1 prim::ListConstruct pnnx_8023 4 1 22504 1067 1307 22505 8619 torch.permute torch.permute_2683 2 1 x7.7 8616 8617 $input=x7.7 $dims=8616 #x7.7=(1,6,6,8,8,192)f32 #8617=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_118 1 1 8617 8618 memory_format=torch.contiguous_format $input=8617 #8617=(1,6,8,6,8,192)f32 #8618=(1,6,8,6,8,192)f32 aten::mul pnnx_8025 2 1 H0.1 W0.1 8621 aten::Int pnnx_8026 1 1 8621 8622 prim::ListConstruct pnnx_8027 3 1 8511 8622 8515 8623 prim::Constant pnnx_8029 0 1 8625 value=None prim::Constant pnnx_8030 0 1 22506 value=1 Tensor.view Tensor.view_1407 2 1 8618 8619 x8.7 $input=8618 $shape=8619 #8618=(1,6,8,6,8,192)f32 #x8.7=(1,48,48,192)f32 Tensor.view Tensor.view_1408 2 1 x8.7 8623 x9.7 $input=x8.7 $shape=8623 #x8.7=(1,48,48,192)f32 #x9.7=(1,2304,192)f32 aten::add pnnx_8031 3 1 8492 x9.7 22506 input.181 #8492=(1,2304,192)f32 #x9.7=(1,2304,192)f32 #input.181=(1,2304,192)f32 nn.LayerNorm layers_dfe.0.residual_group.blocks.2.norm2 1 1 input.181 8627 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.181=(1,2304,192)f32 #8627=(1,2304,192)f32 nn.Linear layers_dfe.0.residual_group.blocks.2.mlp.fc1 1 1 8627 8632 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #8627=(1,2304,192)f32 
#8632=(1,2304,384)f32 nn.GELU layers_dfe.0.residual_group.blocks.2.mlp.act 1 1 8632 8633 #8632=(1,2304,384)f32 #8633=(1,2304,384)f32 nn.Dropout layers_dfe.0.residual_group.blocks.2.mlp.drop 1 1 8633 8634 #8633=(1,2304,384)f32 #8634=(1,2304,384)f32 nn.Linear layers_dfe.0.residual_group.blocks.2.mlp.fc2 1 1 8634 8635 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #8634=(1,2304,384)f32 #8635=(1,2304,192)f32 nn.Dropout layers_dfe.0.residual_group.blocks.2.mlp.drop 1 1 8635 8636 #8635=(1,2304,192)f32 #8636=(1,2304,192)f32 prim::Constant pnnx_8032 0 1 8637 value=None prim::Constant pnnx_8033 0 1 22507 value=1 aten::add pnnx_8034 3 1 input.181 8636 22507 8638 #input.181=(1,2304,192)f32 #8636=(1,2304,192)f32 #8638=(1,2304,192)f32 prim::Constant pnnx_8035 0 1 8639 value=trunc prim::Constant pnnx_8036 0 1 8640 value=8 prim::Constant pnnx_8037 0 1 8641 value=0 prim::Constant pnnx_8038 0 1 8642 value=2 prim::Constant pnnx_8039 0 1 8643 value=-4 prim::Constant pnnx_8040 0 1 8644 value=1 prim::Constant pnnx_8041 0 1 8645 value=3 prim::Constant pnnx_8042 0 1 8646 value=8 prim::Constant pnnx_8043 0 1 8647 value=4 prim::Constant pnnx_8044 0 1 8648 value=5 prim::Constant pnnx_8045 0 1 8649 value=-1 prim::Constant pnnx_8046 0 1 8650 value=64 pnnx.Attribute layers_dfe.0.residual_group.blocks.3 0 1 attn_mask.41 @attn_mask=(36,64,64)f32 #attn_mask.41=(36,64,64)f32 aten::size pnnx_8047 2 1 8638 8641 8657 #8638=(1,2304,192)f32 prim::NumToTensor pnnx_8048 1 1 8657 B.97 aten::Int pnnx_8049 1 1 B.97 8659 aten::Int pnnx_8050 1 1 B.97 8660 aten::size pnnx_8051 2 1 8638 8642 8661 #8638=(1,2304,192)f32 prim::NumToTensor pnnx_8052 1 1 8661 C.167 aten::Int pnnx_8053 1 1 C.167 8663 aten::Int pnnx_8054 1 1 C.167 8664 aten::Int pnnx_8055 1 1 C.167 8665 aten::Int pnnx_8056 1 1 C.167 8666 nn.LayerNorm layers_dfe.0.residual_group.blocks.3.norm1 1 1 8638 8667 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #8638=(1,2304,192)f32 #8667=(1,2304,192)f32 prim::ListConstruct pnnx_8057 4 1 8660 1064 1304 8666 8668 prim::Constant pnnx_8059 0 1 22508 value=-4 prim::ListConstruct pnnx_8060 2 1 8643 22508 8670 prim::Constant pnnx_8061 0 1 22509 value=2 prim::ListConstruct pnnx_8062 2 1 8644 22509 8671 Tensor.view Tensor.view_1409 2 1 8667 8668 x.81 $input=8667 $shape=8668 #8667=(1,2304,192)f32 #x.81=(1,48,48,192)f32 prim::Constant pnnx_8064 0 1 22510 value=0 torch.roll torch.roll_2458 3 1 x.81 8670 8671 x6.9 $input=x.81 $shifts=8670 $dims=8671 #x.81=(1,48,48,192)f32 #x6.9=(1,48,48,192)f32 aten::size pnnx_8065 2 1 x6.9 22510 8673 #x6.9=(1,48,48,192)f32 prim::NumToTensor pnnx_8066 1 1 8673 B1.9 aten::Int pnnx_8067 1 1 B1.9 8675 prim::Constant pnnx_8068 0 1 22511 value=1 aten::size pnnx_8069 2 1 x6.9 22511 8676 #x6.9=(1,48,48,192)f32 prim::NumToTensor pnnx_8070 1 1 8676 8677 prim::Constant pnnx_8071 0 1 22512 value=2 aten::size pnnx_8072 2 1 x6.9 22512 8678 #x6.9=(1,48,48,192)f32 prim::NumToTensor pnnx_8073 1 1 8678 8679 aten::size pnnx_8074 2 1 x6.9 8645 8680 #x6.9=(1,48,48,192)f32 prim::NumToTensor pnnx_8075 1 1 8680 C1.9 aten::Int pnnx_8076 1 1 C1.9 8682 aten::Int pnnx_8077 1 1 C1.9 8683 aten::div pnnx_8078 3 1 8677 8640 8639 8684 aten::Int pnnx_8079 1 1 8684 8685 prim::Constant pnnx_8080 0 1 22513 value=8 prim::Constant pnnx_8081 0 1 22514 value=trunc aten::div pnnx_8082 3 1 8679 22513 22514 8686 aten::Int pnnx_8083 1 1 8686 8687 prim::Constant pnnx_8084 0 1 22515 value=8 prim::ListConstruct pnnx_8085 6 1 8675 8685 8646 8687 22515 8683 8688 prim::Constant 
pnnx_8087 0 1 22516 value=0 prim::Constant pnnx_8088 0 1 22517 value=1 prim::Constant pnnx_8089 0 1 22518 value=3 prim::Constant pnnx_8090 0 1 22519 value=2 prim::ListConstruct pnnx_8091 6 1 22516 22517 22518 22519 8647 8648 8690 Tensor.view Tensor.view_1410 2 1 x6.9 8688 x7.9 $input=x6.9 $shape=8688 #x6.9=(1,48,48,192)f32 #x7.9=(1,6,8,6,8,192)f32 prim::Constant pnnx_8095 0 1 22521 value=8 prim::Constant pnnx_8096 0 1 22522 value=8 prim::ListConstruct pnnx_8097 4 1 8649 22521 22522 8682 8693 torch.permute torch.permute_2684 2 1 x7.9 8690 8691 $input=x7.9 $dims=8690 #x7.9=(1,6,8,6,8,192)f32 #8691=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_119 1 1 8691 8692 memory_format=torch.contiguous_format $input=8691 #8691=(1,6,6,8,8,192)f32 #8692=(1,6,6,8,8,192)f32 prim::Constant pnnx_8099 0 1 22523 value=-1 prim::ListConstruct pnnx_8100 3 1 22523 8650 8665 8695 prim::Constant pnnx_8102 0 1 8697 value=1.767767e-01 prim::Constant pnnx_8103 0 1 8698 value=trunc prim::Constant pnnx_8104 0 1 8699 value=6 prim::Constant pnnx_8105 0 1 8700 value=0 prim::Constant pnnx_8106 0 1 8701 value=1 prim::Constant pnnx_8107 0 1 8702 value=2 prim::Constant pnnx_8108 0 1 8703 value=3 prim::Constant pnnx_8109 0 1 8704 value=6 prim::Constant pnnx_8110 0 1 8705 value=4 prim::Constant pnnx_8111 0 1 8706 value=-2 prim::Constant pnnx_8112 0 1 8707 value=-1 prim::Constant pnnx_8113 0 1 8708 value=64 pnnx.Attribute layers_dfe.0.residual_group.blocks.3.attn 0 1 relative_position_bias_table.81 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.81=(225,6)f32 pnnx.Attribute layers_dfe.0.residual_group.blocks.3.attn 0 1 relative_position_index.81 @relative_position_index=(64,64)i64 #relative_position_index.81=(64,64)i64 Tensor.view Tensor.view_1411 2 1 8692 8693 x_windows.81 $input=8692 $shape=8693 #8692=(1,6,6,8,8,192)f32 #x_windows.81=(36,8,8,192)f32 Tensor.view Tensor.view_1412 2 1 x_windows.81 8695 x8.9 $input=x_windows.81 $shape=8695 #x_windows.81=(36,8,8,192)f32 #x8.9=(36,64,192)f32 aten::size pnnx_8114 2 1 x8.9 8700 8716 #x8.9=(36,64,192)f32 prim::NumToTensor pnnx_8115 1 1 8716 B_.81 aten::Int pnnx_8116 1 1 B_.81 8718 aten::Int pnnx_8117 1 1 B_.81 8719 aten::size pnnx_8118 2 1 x8.9 8701 8720 #x8.9=(36,64,192)f32 prim::NumToTensor pnnx_8119 1 1 8720 N.81 aten::Int pnnx_8120 1 1 N.81 8722 aten::Int pnnx_8121 1 1 N.81 8723 aten::Int pnnx_8122 1 1 N.81 8724 aten::Int pnnx_8123 1 1 N.81 8725 aten::Int pnnx_8124 1 1 N.81 8726 aten::Int pnnx_8125 1 1 N.81 8727 aten::size pnnx_8126 2 1 x8.9 8702 8728 #x8.9=(36,64,192)f32 prim::NumToTensor pnnx_8127 1 1 8728 C.169 aten::Int pnnx_8128 1 1 C.169 8730 nn.Linear layers_dfe.0.residual_group.blocks.3.attn.qkv 1 1 x8.9 8731 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x8.9=(36,64,192)f32 #8731=(36,64,576)f32 aten::div pnnx_8129 3 1 C.169 8699 8698 8732 aten::Int pnnx_8130 1 1 8732 8733 prim::ListConstruct pnnx_8131 5 1 8719 8727 8703 8704 8733 8734 prim::Constant pnnx_8133 0 1 22524 value=2 prim::Constant pnnx_8134 0 1 22525 value=0 prim::Constant pnnx_8135 0 1 22526 value=3 prim::Constant pnnx_8136 0 1 22527 value=1 prim::ListConstruct pnnx_8137 5 1 22524 22525 22526 22527 8705 8736 Tensor.reshape Tensor.reshape_512 2 1 8731 8734 8735 $input=8731 $shape=8734 #8731=(36,64,576)f32 #8735=(36,64,3,6,32)f32 prim::Constant pnnx_8139 0 1 22528 value=0 prim::Constant pnnx_8140 0 1 22529 value=0 prim::Constant pnnx_8142 0 1 22530 value=0 prim::Constant pnnx_8143 0 1 22531 value=1 prim::Constant pnnx_8145 0 1 22532 value=0 
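Zooming out, every layers_dfe.0.residual_group.blocks.N trace ends the same way: LayerNorm → Linear(192→384) → GELU → Dropout → Linear(384→192) → Dropout, with two residual additions wrapping the attention branch and the MLP branch. Below is a minimal skeleton of one such block; the attention is stood in by nn.Identity since it is sketched separately above, the mlp_ratio of 2 is read off the traced 192→384 shapes, and everything else (names, defaults) is an assumption, not the author's code.

```python
import torch
import torch.nn as nn

# Skeleton of one traced transformer block: two residual branches around
# window attention and a 192 -> 384 -> 192 MLP, matching the repeated
# norm1 / attn / norm2 / mlp.fc1 / act / drop / mlp.fc2 / drop pattern.
class SwinBlockSketch(nn.Module):
    def __init__(self, dim=192, mlp_ratio=2.0, drop=0.0):
        super().__init__()
        self.norm1 = nn.LayerNorm(dim)
        self.attn = nn.Identity()            # stand-in for the (shifted-)window attention
        self.norm2 = nn.LayerNorm(dim)
        hidden = int(dim * mlp_ratio)        # 384 in the trace
        self.mlp = nn.Sequential(
            nn.Linear(dim, hidden), nn.GELU(), nn.Dropout(drop),
            nn.Linear(hidden, dim), nn.Dropout(drop))

    def forward(self, x):                    # x: (1, 2304, 192) in the trace
        x = x + self.attn(self.norm1(x))     # attention branch (with roll/partition in the real model)
        x = x + self.mlp(self.norm2(x))      # MLP branch
        return x

y = SwinBlockSketch()(torch.randn(1, 2304, 192))   # -> (1, 2304, 192)
```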
prim::Constant pnnx_8146 0 1 22533 value=2 torch.permute torch.permute_2685 2 1 8735 8736 qkv1.9 $input=8735 $dims=8736 #8735=(36,64,3,6,32)f32 #qkv1.9=(3,36,6,64,32)f32 Tensor.select Tensor.select_767 3 1 qkv1.9 22528 22529 q.81 $input=qkv1.9 $dim=22528 $index=22529 #qkv1.9=(3,36,6,64,32)f32 #q.81=(36,6,64,32)f32 aten::mul pnnx_8148 2 1 q.81 8697 q1.9 #q.81=(36,6,64,32)f32 #q1.9=(36,6,64,32)f32 Tensor.select Tensor.select_768 3 1 qkv1.9 22530 22531 k.81 $input=qkv1.9 $dim=22530 $index=22531 #qkv1.9=(3,36,6,64,32)f32 #k.81=(36,6,64,32)f32 prim::Constant pnnx_8151 0 1 22534 value=-1 prim::ListConstruct pnnx_8152 1 1 22534 8744 Tensor.view Tensor.view_1413 2 1 relative_position_index.81 8744 8745 $input=relative_position_index.81 $shape=8744 #relative_position_index.81=(64,64)i64 #8745=(4096)i64 prim::ListConstruct pnnx_8154 1 1 8745 8746 #8745=(4096)i64 prim::Constant pnnx_8156 0 1 22535 value=64 prim::Constant pnnx_8157 0 1 22536 value=-1 prim::ListConstruct pnnx_8158 3 1 8708 22535 22536 8748 Tensor.index Tensor.index_365 2 1 relative_position_bias_table.81 8746 8747 $input=relative_position_bias_table.81 $expr=8746 #relative_position_bias_table.81=(225,6)f32 #8747=(4096,6)f32 prim::Constant pnnx_8160 0 1 22537 value=2 prim::Constant pnnx_8161 0 1 22538 value=0 prim::Constant pnnx_8162 0 1 22539 value=1 prim::ListConstruct pnnx_8163 3 1 22537 22538 22539 8750 Tensor.view Tensor.view_1414 2 1 8747 8748 relative_position_bias.81 $input=8747 $shape=8748 #8747=(4096,6)f32 #relative_position_bias.81=(64,64,6)f32 prim::Constant pnnx_8167 0 1 22541 value=0 torch.permute torch.permute_2686 2 1 relative_position_bias.81 8750 8751 $input=relative_position_bias.81 $dims=8750 #relative_position_bias.81=(64,64,6)f32 #8751=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_120 1 1 8751 relative_position_bias1.9 memory_format=torch.contiguous_format $input=8751 #8751=(6,64,64)f32 #relative_position_bias1.9=(6,64,64)f32 prim::Constant pnnx_8169 0 1 22542 value=1 torch.transpose torch.transpose_3053 3 1 k.81 8706 8707 8742 $input=k.81 $dim0=8706 $dim1=8707 #k.81=(36,6,64,32)f32 #8742=(36,6,32,64)f32 torch.matmul torch.matmul_2282 2 1 q1.9 8742 attn.163 $input=q1.9 $other=8742 #q1.9=(36,6,64,32)f32 #8742=(36,6,32,64)f32 #attn.163=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3295 2 1 relative_position_bias1.9 22541 8753 $input=relative_position_bias1.9 $dim=22541 #relative_position_bias1.9=(6,64,64)f32 #8753=(1,6,64,64)f32 aten::add pnnx_8170 3 1 attn.163 8753 22542 attn2.5 #attn.163=(36,6,64,64)f32 #8753=(1,6,64,64)f32 #attn2.5=(36,6,64,64)f32 prim::Constant pnnx_8171 0 1 22543 value=0 aten::size pnnx_8172 2 1 attn_mask.41 22543 8755 #attn_mask.41=(36,64,64)f32 prim::NumToTensor pnnx_8173 1 1 8755 other.41 aten::Int pnnx_8174 1 1 other.41 8757 prim::Constant pnnx_8175 0 1 22544 value=trunc aten::div pnnx_8176 3 1 B_.81 other.41 22544 8758 aten::Int pnnx_8177 1 1 8758 8759 prim::Constant pnnx_8178 0 1 22545 value=6 prim::ListConstruct pnnx_8179 5 1 8759 8757 22545 8726 8725 8760 prim::Constant pnnx_8181 0 1 22546 value=1 prim::Constant pnnx_8183 0 1 22547 value=0 prim::Constant pnnx_8185 0 1 22548 value=1 Tensor.view Tensor.view_1415 2 1 attn2.5 8760 8761 $input=attn2.5 $shape=8760 #attn2.5=(36,6,64,64)f32 #8761=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3296 2 1 attn_mask.41 22546 8762 $input=attn_mask.41 $dim=22546 #attn_mask.41=(36,64,64)f32 #8762=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3297 2 1 8762 22547 8763 $input=8762 $dim=22547 #8762=(36,1,64,64)f32 #8763=(1,36,1,64,64)f32 aten::add 
pnnx_8186 3 1 8761 8763 22548 attn3.5 #8761=(1,36,6,64,64)f32 #8763=(1,36,1,64,64)f32 #attn3.5=(1,36,6,64,64)f32 prim::Constant pnnx_8187 0 1 22549 value=-1 prim::Constant pnnx_8188 0 1 22550 value=6 prim::ListConstruct pnnx_8189 4 1 22549 22550 8724 8723 8765 Tensor.view Tensor.view_1416 2 1 attn3.5 8765 input.183 $input=attn3.5 $shape=8765 #attn3.5=(1,36,6,64,64)f32 #input.183=(36,6,64,64)f32 nn.Softmax layers_dfe.0.residual_group.blocks.3.attn.softmax 1 1 input.183 8767 dim=-1 #input.183=(36,6,64,64)f32 #8767=(36,6,64,64)f32 nn.Dropout layers_dfe.0.residual_group.blocks.3.attn.attn_drop 1 1 8767 8768 #8767=(36,6,64,64)f32 #8768=(36,6,64,64)f32 Tensor.select Tensor.select_769 3 1 qkv1.9 22532 22533 v.81 $input=qkv1.9 $dim=22532 $index=22533 #qkv1.9=(3,36,6,64,32)f32 #v.81=(36,6,64,32)f32 prim::Constant pnnx_8192 0 1 22551 value=1 prim::Constant pnnx_8193 0 1 22552 value=2 torch.matmul torch.matmul_2283 2 1 8768 v.81 8769 $input=8768 $other=v.81 #8768=(36,6,64,64)f32 #v.81=(36,6,64,32)f32 #8769=(36,6,64,32)f32 prim::ListConstruct pnnx_8195 3 1 8718 8722 8730 8771 torch.transpose torch.transpose_3054 3 1 8769 22551 22552 8770 $input=8769 $dim0=22551 $dim1=22552 #8769=(36,6,64,32)f32 #8770=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_513 2 1 8770 8771 input1.11 $input=8770 $shape=8771 #8770=(36,64,6,32)f32 #input1.11=(36,64,192)f32 nn.Linear layers_dfe.0.residual_group.blocks.3.attn.proj 1 1 input1.11 8773 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.11=(36,64,192)f32 #8773=(36,64,192)f32 nn.Dropout layers_dfe.0.residual_group.blocks.3.attn.proj_drop 1 1 8773 8774 #8773=(36,64,192)f32 #8774=(36,64,192)f32 prim::Constant pnnx_8197 0 1 22553 value=-1 prim::Constant pnnx_8198 0 1 22554 value=8 prim::Constant pnnx_8199 0 1 22555 value=8 prim::ListConstruct pnnx_8200 4 1 22553 22554 22555 8664 8775 prim::Constant pnnx_8202 0 1 22556 value=8 prim::Constant pnnx_8203 0 1 22557 value=trunc aten::div pnnx_8204 3 1 H0.1 22556 22557 8777 aten::Int pnnx_8205 1 1 8777 8778 prim::Constant pnnx_8206 0 1 22558 value=8 prim::Constant pnnx_8207 0 1 22559 value=trunc aten::div pnnx_8208 3 1 W0.1 22558 22559 8779 aten::Int pnnx_8209 1 1 8779 8780 prim::Constant pnnx_8210 0 1 22560 value=1 prim::Constant pnnx_8211 0 1 22561 value=8 prim::Constant pnnx_8212 0 1 22562 value=8 prim::Constant pnnx_8213 0 1 22563 value=-1 prim::ListConstruct pnnx_8214 6 1 22560 8778 8780 22561 22562 22563 8781 prim::Constant pnnx_8216 0 1 22564 value=0 prim::Constant pnnx_8217 0 1 22565 value=1 prim::Constant pnnx_8218 0 1 22566 value=3 prim::Constant pnnx_8219 0 1 22567 value=2 prim::Constant pnnx_8220 0 1 22568 value=4 prim::Constant pnnx_8221 0 1 22569 value=5 prim::ListConstruct pnnx_8222 6 1 22564 22565 22566 22567 22568 22569 8783 Tensor.view Tensor.view_1417 2 1 8774 8775 windows.81 $input=8774 $shape=8775 #8774=(36,64,192)f32 #windows.81=(36,8,8,192)f32 Tensor.view Tensor.view_1418 2 1 windows.81 8781 x9.9 $input=windows.81 $shape=8781 #windows.81=(36,8,8,192)f32 #x9.9=(1,6,6,8,8,192)f32 prim::Constant pnnx_8226 0 1 22571 value=1 prim::Constant pnnx_8227 0 1 22572 value=-1 prim::ListConstruct pnnx_8228 4 1 22571 1061 1301 22572 8786 torch.permute torch.permute_2687 2 1 x9.9 8783 8784 $input=x9.9 $dims=8783 #x9.9=(1,6,6,8,8,192)f32 #8784=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_121 1 1 8784 8785 memory_format=torch.contiguous_format $input=8784 #8784=(1,6,8,6,8,192)f32 #8785=(1,6,8,6,8,192)f32 prim::Constant pnnx_8230 0 1 22573 value=4 prim::Constant pnnx_8231 0 1 22574 
value=4 prim::ListConstruct pnnx_8232 2 1 22573 22574 8788 prim::Constant pnnx_8233 0 1 22575 value=1 prim::Constant pnnx_8234 0 1 22576 value=2 prim::ListConstruct pnnx_8235 2 1 22575 22576 8789 Tensor.view Tensor.view_1419 2 1 8785 8786 shifted_x.41 $input=8785 $shape=8786 #8785=(1,6,8,6,8,192)f32 #shifted_x.41=(1,48,48,192)f32 aten::mul pnnx_8237 2 1 H0.1 W0.1 8791 aten::Int pnnx_8238 1 1 8791 8792 prim::ListConstruct pnnx_8239 3 1 8659 8792 8663 8793 prim::Constant pnnx_8241 0 1 8795 value=None prim::Constant pnnx_8242 0 1 22577 value=1 torch.roll torch.roll_2459 3 1 shifted_x.41 8788 8789 x10.5 $input=shifted_x.41 $shifts=8788 $dims=8789 #shifted_x.41=(1,48,48,192)f32 #x10.5=(1,48,48,192)f32 Tensor.view Tensor.view_1420 2 1 x10.5 8793 x11.5 $input=x10.5 $shape=8793 #x10.5=(1,48,48,192)f32 #x11.5=(1,2304,192)f32 aten::add pnnx_8243 3 1 8638 x11.5 22577 input.185 #8638=(1,2304,192)f32 #x11.5=(1,2304,192)f32 #input.185=(1,2304,192)f32 nn.LayerNorm layers_dfe.0.residual_group.blocks.3.norm2 1 1 input.185 8797 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.185=(1,2304,192)f32 #8797=(1,2304,192)f32 nn.Linear layers_dfe.0.residual_group.blocks.3.mlp.fc1 1 1 8797 8802 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #8797=(1,2304,192)f32 #8802=(1,2304,384)f32 nn.GELU layers_dfe.0.residual_group.blocks.3.mlp.act 1 1 8802 8803 #8802=(1,2304,384)f32 #8803=(1,2304,384)f32 nn.Dropout layers_dfe.0.residual_group.blocks.3.mlp.drop 1 1 8803 8804 #8803=(1,2304,384)f32 #8804=(1,2304,384)f32 nn.Linear layers_dfe.0.residual_group.blocks.3.mlp.fc2 1 1 8804 8805 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #8804=(1,2304,384)f32 #8805=(1,2304,192)f32 nn.Dropout layers_dfe.0.residual_group.blocks.3.mlp.drop 1 1 8805 8806 #8805=(1,2304,192)f32 #8806=(1,2304,192)f32 prim::Constant pnnx_8244 0 1 8807 value=None prim::Constant pnnx_8245 0 1 22578 value=1 aten::add pnnx_8246 3 1 input.185 8806 22578 8808 #input.185=(1,2304,192)f32 #8806=(1,2304,192)f32 #8808=(1,2304,192)f32 prim::Constant pnnx_8247 0 1 8809 value=trunc prim::Constant pnnx_8248 0 1 8810 value=8 prim::Constant pnnx_8249 0 1 8811 value=0 prim::Constant pnnx_8250 0 1 8812 value=2 prim::Constant pnnx_8251 0 1 8813 value=1 prim::Constant pnnx_8252 0 1 8814 value=3 prim::Constant pnnx_8253 0 1 8815 value=8 prim::Constant pnnx_8254 0 1 8816 value=4 prim::Constant pnnx_8255 0 1 8817 value=5 prim::Constant pnnx_8256 0 1 8818 value=-1 prim::Constant pnnx_8257 0 1 8819 value=64 aten::size pnnx_8258 2 1 8808 8811 8825 #8808=(1,2304,192)f32 prim::NumToTensor pnnx_8259 1 1 8825 B.99 aten::Int pnnx_8260 1 1 B.99 8827 aten::Int pnnx_8261 1 1 B.99 8828 aten::size pnnx_8262 2 1 8808 8812 8829 #8808=(1,2304,192)f32 prim::NumToTensor pnnx_8263 1 1 8829 C.171 aten::Int pnnx_8264 1 1 C.171 8831 aten::Int pnnx_8265 1 1 C.171 8832 aten::Int pnnx_8266 1 1 C.171 8833 aten::Int pnnx_8267 1 1 C.171 8834 nn.LayerNorm layers_dfe.0.residual_group.blocks.4.norm1 1 1 8808 8835 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #8808=(1,2304,192)f32 #8835=(1,2304,192)f32 prim::ListConstruct pnnx_8268 4 1 8828 1058 1298 8834 8836 prim::Constant pnnx_8270 0 1 22579 value=0 Tensor.view Tensor.view_1421 2 1 8835 8836 x.83 $input=8835 $shape=8836 #8835=(1,2304,192)f32 #x.83=(1,48,48,192)f32 aten::size pnnx_8271 2 1 x.83 22579 8838 #x.83=(1,48,48,192)f32 prim::NumToTensor pnnx_8272 1 1 8838 B1.11 aten::Int pnnx_8273 1 1 
B1.11 8840 aten::size pnnx_8274 2 1 x.83 8813 8841 #x.83=(1,48,48,192)f32 prim::NumToTensor pnnx_8275 1 1 8841 8842 prim::Constant pnnx_8276 0 1 22580 value=2 aten::size pnnx_8277 2 1 x.83 22580 8843 #x.83=(1,48,48,192)f32 prim::NumToTensor pnnx_8278 1 1 8843 8844 aten::size pnnx_8279 2 1 x.83 8814 8845 #x.83=(1,48,48,192)f32 prim::NumToTensor pnnx_8280 1 1 8845 C1.11 aten::Int pnnx_8281 1 1 C1.11 8847 aten::Int pnnx_8282 1 1 C1.11 8848 aten::div pnnx_8283 3 1 8842 8810 8809 8849 aten::Int pnnx_8284 1 1 8849 8850 prim::Constant pnnx_8285 0 1 22581 value=8 prim::Constant pnnx_8286 0 1 22582 value=trunc aten::div pnnx_8287 3 1 8844 22581 22582 8851 aten::Int pnnx_8288 1 1 8851 8852 prim::Constant pnnx_8289 0 1 22583 value=8 prim::ListConstruct pnnx_8290 6 1 8840 8850 8815 8852 22583 8848 8853 prim::Constant pnnx_8292 0 1 22584 value=0 prim::Constant pnnx_8293 0 1 22585 value=1 prim::Constant pnnx_8294 0 1 22586 value=3 prim::Constant pnnx_8295 0 1 22587 value=2 prim::ListConstruct pnnx_8296 6 1 22584 22585 22586 22587 8816 8817 8855 Tensor.view Tensor.view_1422 2 1 x.83 8853 x5.43 $input=x.83 $shape=8853 #x.83=(1,48,48,192)f32 #x5.43=(1,6,8,6,8,192)f32 prim::Constant pnnx_8300 0 1 22589 value=8 prim::Constant pnnx_8301 0 1 22590 value=8 prim::ListConstruct pnnx_8302 4 1 8818 22589 22590 8847 8858 torch.permute torch.permute_2688 2 1 x5.43 8855 8856 $input=x5.43 $dims=8855 #x5.43=(1,6,8,6,8,192)f32 #8856=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_122 1 1 8856 8857 memory_format=torch.contiguous_format $input=8856 #8856=(1,6,6,8,8,192)f32 #8857=(1,6,6,8,8,192)f32 prim::Constant pnnx_8304 0 1 22591 value=-1 prim::ListConstruct pnnx_8305 3 1 22591 8819 8833 8860 prim::Constant pnnx_8307 0 1 8862 value=1.767767e-01 prim::Constant pnnx_8308 0 1 8863 value=trunc prim::Constant pnnx_8309 0 1 8864 value=6 prim::Constant pnnx_8310 0 1 8865 value=0 prim::Constant pnnx_8311 0 1 8866 value=1 prim::Constant pnnx_8312 0 1 8867 value=2 prim::Constant pnnx_8313 0 1 8868 value=3 prim::Constant pnnx_8314 0 1 8869 value=6 prim::Constant pnnx_8315 0 1 8870 value=4 prim::Constant pnnx_8316 0 1 8871 value=-2 prim::Constant pnnx_8317 0 1 8872 value=-1 prim::Constant pnnx_8318 0 1 8873 value=64 pnnx.Attribute layers_dfe.0.residual_group.blocks.4.attn 0 1 relative_position_bias_table.83 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.83=(225,6)f32 pnnx.Attribute layers_dfe.0.residual_group.blocks.4.attn 0 1 relative_position_index.83 @relative_position_index=(64,64)i64 #relative_position_index.83=(64,64)i64 Tensor.view Tensor.view_1423 2 1 8857 8858 x_windows.83 $input=8857 $shape=8858 #8857=(1,6,6,8,8,192)f32 #x_windows.83=(36,8,8,192)f32 Tensor.view Tensor.view_1424 2 1 x_windows.83 8860 x6.11 $input=x_windows.83 $shape=8860 #x_windows.83=(36,8,8,192)f32 #x6.11=(36,64,192)f32 aten::size pnnx_8319 2 1 x6.11 8865 8881 #x6.11=(36,64,192)f32 prim::NumToTensor pnnx_8320 1 1 8881 B_.83 aten::Int pnnx_8321 1 1 B_.83 8883 aten::Int pnnx_8322 1 1 B_.83 8884 aten::size pnnx_8323 2 1 x6.11 8866 8885 #x6.11=(36,64,192)f32 prim::NumToTensor pnnx_8324 1 1 8885 N.83 aten::Int pnnx_8325 1 1 N.83 8887 aten::Int pnnx_8326 1 1 N.83 8888 aten::size pnnx_8327 2 1 x6.11 8867 8889 #x6.11=(36,64,192)f32 prim::NumToTensor pnnx_8328 1 1 8889 C.173 aten::Int pnnx_8329 1 1 C.173 8891 nn.Linear layers_dfe.0.residual_group.blocks.4.attn.qkv 1 1 x6.11 8892 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x6.11=(36,64,192)f32 #8892=(36,64,576)f32 aten::div pnnx_8330 3 1 C.173 8864 
8863 8893 aten::Int pnnx_8331 1 1 8893 8894 prim::ListConstruct pnnx_8332 5 1 8884 8888 8868 8869 8894 8895 prim::Constant pnnx_8334 0 1 22592 value=2 prim::Constant pnnx_8335 0 1 22593 value=0 prim::Constant pnnx_8336 0 1 22594 value=3 prim::Constant pnnx_8337 0 1 22595 value=1 prim::ListConstruct pnnx_8338 5 1 22592 22593 22594 22595 8870 8897 Tensor.reshape Tensor.reshape_514 2 1 8892 8895 8896 $input=8892 $shape=8895 #8892=(36,64,576)f32 #8896=(36,64,3,6,32)f32 prim::Constant pnnx_8340 0 1 22596 value=0 prim::Constant pnnx_8341 0 1 22597 value=0 prim::Constant pnnx_8343 0 1 22598 value=0 prim::Constant pnnx_8344 0 1 22599 value=1 prim::Constant pnnx_8346 0 1 22600 value=0 prim::Constant pnnx_8347 0 1 22601 value=2 torch.permute torch.permute_2689 2 1 8896 8897 qkv1.11 $input=8896 $dims=8897 #8896=(36,64,3,6,32)f32 #qkv1.11=(3,36,6,64,32)f32 Tensor.select Tensor.select_770 3 1 qkv1.11 22596 22597 q.83 $input=qkv1.11 $dim=22596 $index=22597 #qkv1.11=(3,36,6,64,32)f32 #q.83=(36,6,64,32)f32 aten::mul pnnx_8349 2 1 q.83 8862 q1.11 #q.83=(36,6,64,32)f32 #q1.11=(36,6,64,32)f32 Tensor.select Tensor.select_771 3 1 qkv1.11 22598 22599 k.83 $input=qkv1.11 $dim=22598 $index=22599 #qkv1.11=(3,36,6,64,32)f32 #k.83=(36,6,64,32)f32 prim::Constant pnnx_8352 0 1 22602 value=-1 prim::ListConstruct pnnx_8353 1 1 22602 8905 Tensor.view Tensor.view_1425 2 1 relative_position_index.83 8905 8906 $input=relative_position_index.83 $shape=8905 #relative_position_index.83=(64,64)i64 #8906=(4096)i64 prim::ListConstruct pnnx_8355 1 1 8906 8907 #8906=(4096)i64 prim::Constant pnnx_8357 0 1 22603 value=64 prim::Constant pnnx_8358 0 1 22604 value=-1 prim::ListConstruct pnnx_8359 3 1 8873 22603 22604 8909 Tensor.index Tensor.index_366 2 1 relative_position_bias_table.83 8907 8908 $input=relative_position_bias_table.83 $expr=8907 #relative_position_bias_table.83=(225,6)f32 #8908=(4096,6)f32 prim::Constant pnnx_8361 0 1 22605 value=2 prim::Constant pnnx_8362 0 1 22606 value=0 prim::Constant pnnx_8363 0 1 22607 value=1 prim::ListConstruct pnnx_8364 3 1 22605 22606 22607 8911 Tensor.view Tensor.view_1426 2 1 8908 8909 relative_position_bias.83 $input=8908 $shape=8909 #8908=(4096,6)f32 #relative_position_bias.83=(64,64,6)f32 prim::Constant pnnx_8368 0 1 22609 value=0 torch.permute torch.permute_2690 2 1 relative_position_bias.83 8911 8912 $input=relative_position_bias.83 $dims=8911 #relative_position_bias.83=(64,64,6)f32 #8912=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_123 1 1 8912 relative_position_bias1.11 memory_format=torch.contiguous_format $input=8912 #8912=(6,64,64)f32 #relative_position_bias1.11=(6,64,64)f32 prim::Constant pnnx_8370 0 1 22610 value=1 torch.transpose torch.transpose_3055 3 1 k.83 8871 8872 8903 $input=k.83 $dim0=8871 $dim1=8872 #k.83=(36,6,64,32)f32 #8903=(36,6,32,64)f32 torch.matmul torch.matmul_2284 2 1 q1.11 8903 attn.167 $input=q1.11 $other=8903 #q1.11=(36,6,64,32)f32 #8903=(36,6,32,64)f32 #attn.167=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3298 2 1 relative_position_bias1.11 22609 8914 $input=relative_position_bias1.11 $dim=22609 #relative_position_bias1.11=(6,64,64)f32 #8914=(1,6,64,64)f32 aten::add pnnx_8371 3 1 attn.167 8914 22610 input.187 #attn.167=(36,6,64,64)f32 #8914=(1,6,64,64)f32 #input.187=(36,6,64,64)f32 nn.Softmax layers_dfe.0.residual_group.blocks.4.attn.softmax 1 1 input.187 8916 dim=-1 #input.187=(36,6,64,64)f32 #8916=(36,6,64,64)f32 nn.Dropout layers_dfe.0.residual_group.blocks.4.attn.attn_drop 1 1 8916 8917 #8916=(36,6,64,64)f32 #8917=(36,6,64,64)f32 Tensor.select 
Tensor.select_772 3 1 qkv1.11 22600 22601 v.83 $input=qkv1.11 $dim=22600 $index=22601 #qkv1.11=(3,36,6,64,32)f32 #v.83=(36,6,64,32)f32 prim::Constant pnnx_8373 0 1 22611 value=1 prim::Constant pnnx_8374 0 1 22612 value=2 torch.matmul torch.matmul_2285 2 1 8917 v.83 8918 $input=8917 $other=v.83 #8917=(36,6,64,64)f32 #v.83=(36,6,64,32)f32 #8918=(36,6,64,32)f32 prim::ListConstruct pnnx_8376 3 1 8883 8887 8891 8920 torch.transpose torch.transpose_3056 3 1 8918 22611 22612 8919 $input=8918 $dim0=22611 $dim1=22612 #8918=(36,6,64,32)f32 #8919=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_515 2 1 8919 8920 input1.13 $input=8919 $shape=8920 #8919=(36,64,6,32)f32 #input1.13=(36,64,192)f32 nn.Linear layers_dfe.0.residual_group.blocks.4.attn.proj 1 1 input1.13 8922 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.13=(36,64,192)f32 #8922=(36,64,192)f32 nn.Dropout layers_dfe.0.residual_group.blocks.4.attn.proj_drop 1 1 8922 8923 #8922=(36,64,192)f32 #8923=(36,64,192)f32 prim::Constant pnnx_8378 0 1 22613 value=-1 prim::Constant pnnx_8379 0 1 22614 value=8 prim::Constant pnnx_8380 0 1 22615 value=8 prim::ListConstruct pnnx_8381 4 1 22613 22614 22615 8832 8924 prim::Constant pnnx_8383 0 1 22616 value=8 prim::Constant pnnx_8384 0 1 22617 value=trunc aten::div pnnx_8385 3 1 H0.1 22616 22617 8926 aten::Int pnnx_8386 1 1 8926 8927 prim::Constant pnnx_8387 0 1 22618 value=8 prim::Constant pnnx_8388 0 1 22619 value=trunc aten::div pnnx_8389 3 1 W0.1 22618 22619 8928 aten::Int pnnx_8390 1 1 8928 8929 prim::Constant pnnx_8391 0 1 22620 value=1 prim::Constant pnnx_8392 0 1 22621 value=8 prim::Constant pnnx_8393 0 1 22622 value=8 prim::Constant pnnx_8394 0 1 22623 value=-1 prim::ListConstruct pnnx_8395 6 1 22620 8927 8929 22621 22622 22623 8930 prim::Constant pnnx_8397 0 1 22624 value=0 prim::Constant pnnx_8398 0 1 22625 value=1 prim::Constant pnnx_8399 0 1 22626 value=3 prim::Constant pnnx_8400 0 1 22627 value=2 prim::Constant pnnx_8401 0 1 22628 value=4 prim::Constant pnnx_8402 0 1 22629 value=5 prim::ListConstruct pnnx_8403 6 1 22624 22625 22626 22627 22628 22629 8932 Tensor.view Tensor.view_1427 2 1 8923 8924 windows.83 $input=8923 $shape=8924 #8923=(36,64,192)f32 #windows.83=(36,8,8,192)f32 Tensor.view Tensor.view_1428 2 1 windows.83 8930 x7.11 $input=windows.83 $shape=8930 #windows.83=(36,8,8,192)f32 #x7.11=(1,6,6,8,8,192)f32 prim::Constant pnnx_8407 0 1 22631 value=1 prim::Constant pnnx_8408 0 1 22632 value=-1 prim::ListConstruct pnnx_8409 4 1 22631 1055 1295 22632 8935 torch.permute torch.permute_2691 2 1 x7.11 8932 8933 $input=x7.11 $dims=8932 #x7.11=(1,6,6,8,8,192)f32 #8933=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_124 1 1 8933 8934 memory_format=torch.contiguous_format $input=8933 #8933=(1,6,8,6,8,192)f32 #8934=(1,6,8,6,8,192)f32 aten::mul pnnx_8411 2 1 H0.1 W0.1 8937 aten::Int pnnx_8412 1 1 8937 8938 prim::ListConstruct pnnx_8413 3 1 8827 8938 8831 8939 prim::Constant pnnx_8415 0 1 8941 value=None prim::Constant pnnx_8416 0 1 22633 value=1 Tensor.view Tensor.view_1429 2 1 8934 8935 x8.11 $input=8934 $shape=8935 #8934=(1,6,8,6,8,192)f32 #x8.11=(1,48,48,192)f32 Tensor.view Tensor.view_1430 2 1 x8.11 8939 x9.11 $input=x8.11 $shape=8939 #x8.11=(1,48,48,192)f32 #x9.11=(1,2304,192)f32 aten::add pnnx_8417 3 1 8808 x9.11 22633 input.189 #8808=(1,2304,192)f32 #x9.11=(1,2304,192)f32 #input.189=(1,2304,192)f32 nn.LayerNorm layers_dfe.0.residual_group.blocks.4.norm2 1 1 input.189 8943 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 
@weight=(192)f32 #input.189=(1,2304,192)f32 #8943=(1,2304,192)f32 nn.Linear layers_dfe.0.residual_group.blocks.4.mlp.fc1 1 1 8943 8948 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #8943=(1,2304,192)f32 #8948=(1,2304,384)f32 nn.GELU layers_dfe.0.residual_group.blocks.4.mlp.act 1 1 8948 8949 #8948=(1,2304,384)f32 #8949=(1,2304,384)f32 nn.Dropout layers_dfe.0.residual_group.blocks.4.mlp.drop 1 1 8949 8950 #8949=(1,2304,384)f32 #8950=(1,2304,384)f32 nn.Linear layers_dfe.0.residual_group.blocks.4.mlp.fc2 1 1 8950 8951 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #8950=(1,2304,384)f32 #8951=(1,2304,192)f32 nn.Dropout layers_dfe.0.residual_group.blocks.4.mlp.drop 1 1 8951 8952 #8951=(1,2304,192)f32 #8952=(1,2304,192)f32 prim::Constant pnnx_8418 0 1 8953 value=None prim::Constant pnnx_8419 0 1 22634 value=1 aten::add pnnx_8420 3 1 input.189 8952 22634 8954 #input.189=(1,2304,192)f32 #8952=(1,2304,192)f32 #8954=(1,2304,192)f32 prim::Constant pnnx_8421 0 1 8955 value=trunc prim::Constant pnnx_8422 0 1 8956 value=8 prim::Constant pnnx_8423 0 1 8957 value=0 prim::Constant pnnx_8424 0 1 8958 value=2 prim::Constant pnnx_8425 0 1 8959 value=-4 prim::Constant pnnx_8426 0 1 8960 value=1 prim::Constant pnnx_8427 0 1 8961 value=3 prim::Constant pnnx_8428 0 1 8962 value=8 prim::Constant pnnx_8429 0 1 8963 value=4 prim::Constant pnnx_8430 0 1 8964 value=5 prim::Constant pnnx_8431 0 1 8965 value=-1 prim::Constant pnnx_8432 0 1 8966 value=64 pnnx.Attribute layers_dfe.0.residual_group.blocks.5 0 1 attn_mask.43 @attn_mask=(36,64,64)f32 #attn_mask.43=(36,64,64)f32 aten::size pnnx_8433 2 1 8954 8957 8973 #8954=(1,2304,192)f32 prim::NumToTensor pnnx_8434 1 1 8973 B.101 aten::Int pnnx_8435 1 1 B.101 8975 aten::Int pnnx_8436 1 1 B.101 8976 aten::size pnnx_8437 2 1 8954 8958 8977 #8954=(1,2304,192)f32 prim::NumToTensor pnnx_8438 1 1 8977 C.175 aten::Int pnnx_8439 1 1 C.175 8979 aten::Int pnnx_8440 1 1 C.175 8980 aten::Int pnnx_8441 1 1 C.175 8981 aten::Int pnnx_8442 1 1 C.175 8982 nn.LayerNorm layers_dfe.0.residual_group.blocks.5.norm1 1 1 8954 8983 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #8954=(1,2304,192)f32 #8983=(1,2304,192)f32 prim::ListConstruct pnnx_8443 4 1 8976 1052 1292 8982 8984 prim::Constant pnnx_8445 0 1 22635 value=-4 prim::ListConstruct pnnx_8446 2 1 8959 22635 8986 prim::Constant pnnx_8447 0 1 22636 value=2 prim::ListConstruct pnnx_8448 2 1 8960 22636 8987 Tensor.view Tensor.view_1431 2 1 8983 8984 x.85 $input=8983 $shape=8984 #8983=(1,2304,192)f32 #x.85=(1,48,48,192)f32 prim::Constant pnnx_8450 0 1 22637 value=0 torch.roll torch.roll_2460 3 1 x.85 8986 8987 x6.13 $input=x.85 $shifts=8986 $dims=8987 #x.85=(1,48,48,192)f32 #x6.13=(1,48,48,192)f32 aten::size pnnx_8451 2 1 x6.13 22637 8989 #x6.13=(1,48,48,192)f32 prim::NumToTensor pnnx_8452 1 1 8989 B1.13 aten::Int pnnx_8453 1 1 B1.13 8991 prim::Constant pnnx_8454 0 1 22638 value=1 aten::size pnnx_8455 2 1 x6.13 22638 8992 #x6.13=(1,48,48,192)f32 prim::NumToTensor pnnx_8456 1 1 8992 8993 prim::Constant pnnx_8457 0 1 22639 value=2 aten::size pnnx_8458 2 1 x6.13 22639 8994 #x6.13=(1,48,48,192)f32 prim::NumToTensor pnnx_8459 1 1 8994 8995 aten::size pnnx_8460 2 1 x6.13 8961 8996 #x6.13=(1,48,48,192)f32 prim::NumToTensor pnnx_8461 1 1 8996 C1.13 aten::Int pnnx_8462 1 1 C1.13 8998 aten::Int pnnx_8463 1 1 C1.13 8999 aten::div pnnx_8464 3 1 8993 8956 8955 9000 aten::Int pnnx_8465 1 1 9000 9001 prim::Constant pnnx_8466 0 1 22640 value=8 
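The run of ops ending here closes block 4 of layers_dfe.0.residual_group (norm2, mlp.fc1, GELU, mlp.fc2, dropout, residual add) and opens block 5, the shifted-window variant: the (1,2304,192) sequence is viewed as (1,48,48,192), cyclically rolled by (-4,-4) along the two spatial dims (torch.roll_2460), and the size and divide-by-8 ops that follow set up its partition into 8x8 windows. Below is a minimal PyTorch sketch of that shift-and-partition step, assuming a helper named window_partition and random sample tensors; these names are illustrative and are not identifiers from the export.

import torch

def window_partition(x: torch.Tensor, window_size: int = 8) -> torch.Tensor:
    # (B, H, W, C) -> (num_windows * B, window_size * window_size, C), matching the
    # view(1,6,8,6,8,192) -> permute(0,1,3,2,4,5) -> contiguous -> view(36,64,192) chain in the graph.
    B, H, W, C = x.shape
    x = x.view(B, H // window_size, window_size, W // window_size, window_size, C)
    x = x.permute(0, 1, 3, 2, 4, 5).contiguous()
    return x.view(-1, window_size * window_size, C)

x = torch.randn(1, 48, 48, 192)                        # stands in for x.85
shifted = torch.roll(x, shifts=(-4, -4), dims=(1, 2))  # torch.roll_2460, shift = window_size // 2
windows = window_partition(shifted)                    # (36, 64, 192), the shape of x8.13

Later in block 5 the precomputed attn_mask.43 is added to the attention scores so that tokens brought together by the roll cannot attend across window boundaries, and the result is rolled back by (+4,+4) before the residual add.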
prim::Constant pnnx_8467 0 1 22641 value=trunc aten::div pnnx_8468 3 1 8995 22640 22641 9002 aten::Int pnnx_8469 1 1 9002 9003 prim::Constant pnnx_8470 0 1 22642 value=8 prim::ListConstruct pnnx_8471 6 1 8991 9001 8962 9003 22642 8999 9004 prim::Constant pnnx_8473 0 1 22643 value=0 prim::Constant pnnx_8474 0 1 22644 value=1 prim::Constant pnnx_8475 0 1 22645 value=3 prim::Constant pnnx_8476 0 1 22646 value=2 prim::ListConstruct pnnx_8477 6 1 22643 22644 22645 22646 8963 8964 9006 Tensor.view Tensor.view_1432 2 1 x6.13 9004 x7.13 $input=x6.13 $shape=9004 #x6.13=(1,48,48,192)f32 #x7.13=(1,6,8,6,8,192)f32 prim::Constant pnnx_8481 0 1 22648 value=8 prim::Constant pnnx_8482 0 1 22649 value=8 prim::ListConstruct pnnx_8483 4 1 8965 22648 22649 8998 9009 torch.permute torch.permute_2692 2 1 x7.13 9006 9007 $input=x7.13 $dims=9006 #x7.13=(1,6,8,6,8,192)f32 #9007=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_125 1 1 9007 9008 memory_format=torch.contiguous_format $input=9007 #9007=(1,6,6,8,8,192)f32 #9008=(1,6,6,8,8,192)f32 prim::Constant pnnx_8485 0 1 22650 value=-1 prim::ListConstruct pnnx_8486 3 1 22650 8966 8981 9011 prim::Constant pnnx_8488 0 1 9013 value=1.767767e-01 prim::Constant pnnx_8489 0 1 9014 value=trunc prim::Constant pnnx_8490 0 1 9015 value=6 prim::Constant pnnx_8491 0 1 9016 value=0 prim::Constant pnnx_8492 0 1 9017 value=1 prim::Constant pnnx_8493 0 1 9018 value=2 prim::Constant pnnx_8494 0 1 9019 value=3 prim::Constant pnnx_8495 0 1 9020 value=6 prim::Constant pnnx_8496 0 1 9021 value=4 prim::Constant pnnx_8497 0 1 9022 value=-2 prim::Constant pnnx_8498 0 1 9023 value=-1 prim::Constant pnnx_8499 0 1 9024 value=64 pnnx.Attribute layers_dfe.0.residual_group.blocks.5.attn 0 1 relative_position_bias_table.85 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.85=(225,6)f32 pnnx.Attribute layers_dfe.0.residual_group.blocks.5.attn 0 1 relative_position_index.85 @relative_position_index=(64,64)i64 #relative_position_index.85=(64,64)i64 Tensor.view Tensor.view_1433 2 1 9008 9009 x_windows.85 $input=9008 $shape=9009 #9008=(1,6,6,8,8,192)f32 #x_windows.85=(36,8,8,192)f32 Tensor.view Tensor.view_1434 2 1 x_windows.85 9011 x8.13 $input=x_windows.85 $shape=9011 #x_windows.85=(36,8,8,192)f32 #x8.13=(36,64,192)f32 aten::size pnnx_8500 2 1 x8.13 9016 9032 #x8.13=(36,64,192)f32 prim::NumToTensor pnnx_8501 1 1 9032 B_.85 aten::Int pnnx_8502 1 1 B_.85 9034 aten::Int pnnx_8503 1 1 B_.85 9035 aten::size pnnx_8504 2 1 x8.13 9017 9036 #x8.13=(36,64,192)f32 prim::NumToTensor pnnx_8505 1 1 9036 N.85 aten::Int pnnx_8506 1 1 N.85 9038 aten::Int pnnx_8507 1 1 N.85 9039 aten::Int pnnx_8508 1 1 N.85 9040 aten::Int pnnx_8509 1 1 N.85 9041 aten::Int pnnx_8510 1 1 N.85 9042 aten::Int pnnx_8511 1 1 N.85 9043 aten::size pnnx_8512 2 1 x8.13 9018 9044 #x8.13=(36,64,192)f32 prim::NumToTensor pnnx_8513 1 1 9044 C.177 aten::Int pnnx_8514 1 1 C.177 9046 nn.Linear layers_dfe.0.residual_group.blocks.5.attn.qkv 1 1 x8.13 9047 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x8.13=(36,64,192)f32 #9047=(36,64,576)f32 aten::div pnnx_8515 3 1 C.177 9015 9014 9048 aten::Int pnnx_8516 1 1 9048 9049 prim::ListConstruct pnnx_8517 5 1 9035 9043 9019 9020 9049 9050 prim::Constant pnnx_8519 0 1 22651 value=2 prim::Constant pnnx_8520 0 1 22652 value=0 prim::Constant pnnx_8521 0 1 22653 value=3 prim::Constant pnnx_8522 0 1 22654 value=1 prim::ListConstruct pnnx_8523 5 1 22651 22652 22653 22654 9021 9052 Tensor.reshape Tensor.reshape_516 2 1 9047 9050 9051 $input=9047 
$shape=9050 #9047=(36,64,576)f32 #9051=(36,64,3,6,32)f32 prim::Constant pnnx_8525 0 1 22655 value=0 prim::Constant pnnx_8526 0 1 22656 value=0 prim::Constant pnnx_8528 0 1 22657 value=0 prim::Constant pnnx_8529 0 1 22658 value=1 prim::Constant pnnx_8531 0 1 22659 value=0 prim::Constant pnnx_8532 0 1 22660 value=2 torch.permute torch.permute_2693 2 1 9051 9052 qkv1.13 $input=9051 $dims=9052 #9051=(36,64,3,6,32)f32 #qkv1.13=(3,36,6,64,32)f32 Tensor.select Tensor.select_773 3 1 qkv1.13 22655 22656 q.85 $input=qkv1.13 $dim=22655 $index=22656 #qkv1.13=(3,36,6,64,32)f32 #q.85=(36,6,64,32)f32 aten::mul pnnx_8534 2 1 q.85 9013 q1.13 #q.85=(36,6,64,32)f32 #q1.13=(36,6,64,32)f32 Tensor.select Tensor.select_774 3 1 qkv1.13 22657 22658 k.85 $input=qkv1.13 $dim=22657 $index=22658 #qkv1.13=(3,36,6,64,32)f32 #k.85=(36,6,64,32)f32 prim::Constant pnnx_8537 0 1 22661 value=-1 prim::ListConstruct pnnx_8538 1 1 22661 9060 Tensor.view Tensor.view_1435 2 1 relative_position_index.85 9060 9061 $input=relative_position_index.85 $shape=9060 #relative_position_index.85=(64,64)i64 #9061=(4096)i64 prim::ListConstruct pnnx_8540 1 1 9061 9062 #9061=(4096)i64 prim::Constant pnnx_8542 0 1 22662 value=64 prim::Constant pnnx_8543 0 1 22663 value=-1 prim::ListConstruct pnnx_8544 3 1 9024 22662 22663 9064 Tensor.index Tensor.index_367 2 1 relative_position_bias_table.85 9062 9063 $input=relative_position_bias_table.85 $expr=9062 #relative_position_bias_table.85=(225,6)f32 #9063=(4096,6)f32 prim::Constant pnnx_8546 0 1 22664 value=2 prim::Constant pnnx_8547 0 1 22665 value=0 prim::Constant pnnx_8548 0 1 22666 value=1 prim::ListConstruct pnnx_8549 3 1 22664 22665 22666 9066 Tensor.view Tensor.view_1436 2 1 9063 9064 relative_position_bias.85 $input=9063 $shape=9064 #9063=(4096,6)f32 #relative_position_bias.85=(64,64,6)f32 prim::Constant pnnx_8553 0 1 22668 value=0 torch.permute torch.permute_2694 2 1 relative_position_bias.85 9066 9067 $input=relative_position_bias.85 $dims=9066 #relative_position_bias.85=(64,64,6)f32 #9067=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_126 1 1 9067 relative_position_bias1.13 memory_format=torch.contiguous_format $input=9067 #9067=(6,64,64)f32 #relative_position_bias1.13=(6,64,64)f32 prim::Constant pnnx_8555 0 1 22669 value=1 torch.transpose torch.transpose_3057 3 1 k.85 9022 9023 9058 $input=k.85 $dim0=9022 $dim1=9023 #k.85=(36,6,64,32)f32 #9058=(36,6,32,64)f32 torch.matmul torch.matmul_2286 2 1 q1.13 9058 attn.171 $input=q1.13 $other=9058 #q1.13=(36,6,64,32)f32 #9058=(36,6,32,64)f32 #attn.171=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3299 2 1 relative_position_bias1.13 22668 9069 $input=relative_position_bias1.13 $dim=22668 #relative_position_bias1.13=(6,64,64)f32 #9069=(1,6,64,64)f32 aten::add pnnx_8556 3 1 attn.171 9069 22669 attn2.7 #attn.171=(36,6,64,64)f32 #9069=(1,6,64,64)f32 #attn2.7=(36,6,64,64)f32 prim::Constant pnnx_8557 0 1 22670 value=0 aten::size pnnx_8558 2 1 attn_mask.43 22670 9071 #attn_mask.43=(36,64,64)f32 prim::NumToTensor pnnx_8559 1 1 9071 other.43 aten::Int pnnx_8560 1 1 other.43 9073 prim::Constant pnnx_8561 0 1 22671 value=trunc aten::div pnnx_8562 3 1 B_.85 other.43 22671 9074 aten::Int pnnx_8563 1 1 9074 9075 prim::Constant pnnx_8564 0 1 22672 value=6 prim::ListConstruct pnnx_8565 5 1 9075 9073 22672 9042 9041 9076 prim::Constant pnnx_8567 0 1 22673 value=1 prim::Constant pnnx_8569 0 1 22674 value=0 prim::Constant pnnx_8571 0 1 22675 value=1 Tensor.view Tensor.view_1437 2 1 attn2.7 9076 9077 $input=attn2.7 $shape=9076 #attn2.7=(36,6,64,64)f32 
#9077=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3300 2 1 attn_mask.43 22673 9078 $input=attn_mask.43 $dim=22673 #attn_mask.43=(36,64,64)f32 #9078=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3301 2 1 9078 22674 9079 $input=9078 $dim=22674 #9078=(36,1,64,64)f32 #9079=(1,36,1,64,64)f32 aten::add pnnx_8572 3 1 9077 9079 22675 attn3.7 #9077=(1,36,6,64,64)f32 #9079=(1,36,1,64,64)f32 #attn3.7=(1,36,6,64,64)f32 prim::Constant pnnx_8573 0 1 22676 value=-1 prim::Constant pnnx_8574 0 1 22677 value=6 prim::ListConstruct pnnx_8575 4 1 22676 22677 9040 9039 9081 Tensor.view Tensor.view_1438 2 1 attn3.7 9081 input.191 $input=attn3.7 $shape=9081 #attn3.7=(1,36,6,64,64)f32 #input.191=(36,6,64,64)f32 nn.Softmax layers_dfe.0.residual_group.blocks.5.attn.softmax 1 1 input.191 9083 dim=-1 #input.191=(36,6,64,64)f32 #9083=(36,6,64,64)f32 nn.Dropout layers_dfe.0.residual_group.blocks.5.attn.attn_drop 1 1 9083 9084 #9083=(36,6,64,64)f32 #9084=(36,6,64,64)f32 Tensor.select Tensor.select_775 3 1 qkv1.13 22659 22660 v.85 $input=qkv1.13 $dim=22659 $index=22660 #qkv1.13=(3,36,6,64,32)f32 #v.85=(36,6,64,32)f32 prim::Constant pnnx_8578 0 1 22678 value=1 prim::Constant pnnx_8579 0 1 22679 value=2 torch.matmul torch.matmul_2287 2 1 9084 v.85 9085 $input=9084 $other=v.85 #9084=(36,6,64,64)f32 #v.85=(36,6,64,32)f32 #9085=(36,6,64,32)f32 prim::ListConstruct pnnx_8581 3 1 9034 9038 9046 9087 torch.transpose torch.transpose_3058 3 1 9085 22678 22679 9086 $input=9085 $dim0=22678 $dim1=22679 #9085=(36,6,64,32)f32 #9086=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_517 2 1 9086 9087 input1.15 $input=9086 $shape=9087 #9086=(36,64,6,32)f32 #input1.15=(36,64,192)f32 nn.Linear layers_dfe.0.residual_group.blocks.5.attn.proj 1 1 input1.15 9089 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.15=(36,64,192)f32 #9089=(36,64,192)f32 nn.Dropout layers_dfe.0.residual_group.blocks.5.attn.proj_drop 1 1 9089 9090 #9089=(36,64,192)f32 #9090=(36,64,192)f32 prim::Constant pnnx_8583 0 1 22680 value=-1 prim::Constant pnnx_8584 0 1 22681 value=8 prim::Constant pnnx_8585 0 1 22682 value=8 prim::ListConstruct pnnx_8586 4 1 22680 22681 22682 8980 9091 prim::Constant pnnx_8588 0 1 22683 value=8 prim::Constant pnnx_8589 0 1 22684 value=trunc aten::div pnnx_8590 3 1 H0.1 22683 22684 9093 aten::Int pnnx_8591 1 1 9093 9094 prim::Constant pnnx_8592 0 1 22685 value=8 prim::Constant pnnx_8593 0 1 22686 value=trunc aten::div pnnx_8594 3 1 W0.1 22685 22686 9095 aten::Int pnnx_8595 1 1 9095 9096 prim::Constant pnnx_8596 0 1 22687 value=1 prim::Constant pnnx_8597 0 1 22688 value=8 prim::Constant pnnx_8598 0 1 22689 value=8 prim::Constant pnnx_8599 0 1 22690 value=-1 prim::ListConstruct pnnx_8600 6 1 22687 9094 9096 22688 22689 22690 9097 prim::Constant pnnx_8602 0 1 22691 value=0 prim::Constant pnnx_8603 0 1 22692 value=1 prim::Constant pnnx_8604 0 1 22693 value=3 prim::Constant pnnx_8605 0 1 22694 value=2 prim::Constant pnnx_8606 0 1 22695 value=4 prim::Constant pnnx_8607 0 1 22696 value=5 prim::ListConstruct pnnx_8608 6 1 22691 22692 22693 22694 22695 22696 9099 Tensor.view Tensor.view_1439 2 1 9090 9091 windows.85 $input=9090 $shape=9091 #9090=(36,64,192)f32 #windows.85=(36,8,8,192)f32 Tensor.view Tensor.view_1440 2 1 windows.85 9097 x9.13 $input=windows.85 $shape=9097 #windows.85=(36,8,8,192)f32 #x9.13=(1,6,6,8,8,192)f32 prim::Constant pnnx_8612 0 1 22698 value=1 prim::Constant pnnx_8613 0 1 22699 value=-1 prim::ListConstruct pnnx_8614 4 1 22698 1049 1289 22699 9102 torch.permute torch.permute_2695 2 1 x9.13 
9099 9100 $input=x9.13 $dims=9099 #x9.13=(1,6,6,8,8,192)f32 #9100=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_127 1 1 9100 9101 memory_format=torch.contiguous_format $input=9100 #9100=(1,6,8,6,8,192)f32 #9101=(1,6,8,6,8,192)f32 prim::Constant pnnx_8616 0 1 22700 value=4 prim::Constant pnnx_8617 0 1 22701 value=4 prim::ListConstruct pnnx_8618 2 1 22700 22701 9104 prim::Constant pnnx_8619 0 1 22702 value=1 prim::Constant pnnx_8620 0 1 22703 value=2 prim::ListConstruct pnnx_8621 2 1 22702 22703 9105 Tensor.view Tensor.view_1441 2 1 9101 9102 shifted_x.43 $input=9101 $shape=9102 #9101=(1,6,8,6,8,192)f32 #shifted_x.43=(1,48,48,192)f32 aten::mul pnnx_8623 2 1 H0.1 W0.1 9107 aten::Int pnnx_8624 1 1 9107 9108 prim::ListConstruct pnnx_8625 3 1 8975 9108 8979 9109 prim::Constant pnnx_8627 0 1 9111 value=None prim::Constant pnnx_8628 0 1 22704 value=1 torch.roll torch.roll_2461 3 1 shifted_x.43 9104 9105 x10.7 $input=shifted_x.43 $shifts=9104 $dims=9105 #shifted_x.43=(1,48,48,192)f32 #x10.7=(1,48,48,192)f32 Tensor.view Tensor.view_1442 2 1 x10.7 9109 x11.7 $input=x10.7 $shape=9109 #x10.7=(1,48,48,192)f32 #x11.7=(1,2304,192)f32 aten::add pnnx_8629 3 1 8954 x11.7 22704 input.193 #8954=(1,2304,192)f32 #x11.7=(1,2304,192)f32 #input.193=(1,2304,192)f32 nn.LayerNorm layers_dfe.0.residual_group.blocks.5.norm2 1 1 input.193 9113 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.193=(1,2304,192)f32 #9113=(1,2304,192)f32 nn.Linear layers_dfe.0.residual_group.blocks.5.mlp.fc1 1 1 9113 9118 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #9113=(1,2304,192)f32 #9118=(1,2304,384)f32 nn.GELU layers_dfe.0.residual_group.blocks.5.mlp.act 1 1 9118 9119 #9118=(1,2304,384)f32 #9119=(1,2304,384)f32 nn.Dropout layers_dfe.0.residual_group.blocks.5.mlp.drop 1 1 9119 9120 #9119=(1,2304,384)f32 #9120=(1,2304,384)f32 nn.Linear layers_dfe.0.residual_group.blocks.5.mlp.fc2 1 1 9120 9121 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #9120=(1,2304,384)f32 #9121=(1,2304,192)f32 nn.Dropout layers_dfe.0.residual_group.blocks.5.mlp.drop 1 1 9121 9122 #9121=(1,2304,192)f32 #9122=(1,2304,192)f32 prim::Constant pnnx_8630 0 1 9123 value=None prim::Constant pnnx_8631 0 1 22705 value=1 aten::add pnnx_8632 3 1 input.193 9122 22705 9124 #input.193=(1,2304,192)f32 #9122=(1,2304,192)f32 #9124=(1,2304,192)f32 prim::Constant pnnx_8633 0 1 9125 value=0 prim::Constant pnnx_8634 0 1 9126 value=1 prim::Constant pnnx_8635 0 1 9127 value=2 prim::Constant pnnx_8636 0 1 9128 value=192 aten::size pnnx_8637 2 1 9124 9125 9129 #9124=(1,2304,192)f32 prim::NumToTensor pnnx_8638 1 1 9129 B.103 aten::Int pnnx_8639 1 1 B.103 9131 prim::ListConstruct pnnx_8641 4 1 9131 9128 1046 1286 9133 torch.transpose torch.transpose_3059 3 1 9124 9126 9127 9132 $input=9124 $dim0=9126 $dim1=9127 #9124=(1,2304,192)f32 #9132=(1,192,2304)f32 Tensor.view Tensor.view_1443 2 1 9132 9133 input.195 $input=9132 $shape=9133 #9132=(1,192,2304)f32 #input.195=(1,192,48,48)f32 nn.Conv2d layers_dfe.0.conv 1 1 input.195 9135 bias=True dilation=(1,1) groups=1 in_channels=192 kernel_size=(3,3) out_channels=192 padding=(1,1) padding_mode=zeros stride=(1,1) @bias=(192)f32 @weight=(192,192,3,3)f32 #input.195=(1,192,48,48)f32 #9135=(1,192,48,48)f32 prim::Constant pnnx_8643 0 1 9136 value=-1 prim::Constant pnnx_8644 0 1 9137 value=2 prim::Constant pnnx_8645 0 1 9138 value=1 prim::Constant pnnx_8647 0 1 22706 value=2 torch.flatten torch.flatten_2191 3 1 9135 9137 9136 9139 
$input=9135 $start_dim=9137 $end_dim=9136 #9135=(1,192,48,48)f32 #9139=(1,192,2304)f32 torch.transpose torch.transpose_3060 3 1 9139 9138 22706 9140 $input=9139 $dim0=9138 $dim1=22706 #9139=(1,192,2304)f32 #9140=(1,2304,192)f32 aten::add pnnx_8649 3 1 9140 1327 8160 9141 #9140=(1,2304,192)f32 #1327=(1,2304,192)f32 #9141=(1,2304,192)f32 prim::Constant pnnx_8650 0 1 9142 value=1 prim::Constant pnnx_8651 0 1 9159 value=trunc prim::Constant pnnx_8652 0 1 9160 value=8 prim::Constant pnnx_8653 0 1 9161 value=0 prim::Constant pnnx_8654 0 1 9162 value=2 prim::Constant pnnx_8655 0 1 9163 value=1 prim::Constant pnnx_8656 0 1 9164 value=3 prim::Constant pnnx_8657 0 1 9165 value=8 prim::Constant pnnx_8658 0 1 9166 value=4 prim::Constant pnnx_8659 0 1 9167 value=5 prim::Constant pnnx_8660 0 1 9168 value=-1 prim::Constant pnnx_8661 0 1 9169 value=64 aten::size pnnx_8662 2 1 9141 9161 9175 #9141=(1,2304,192)f32 prim::NumToTensor pnnx_8663 1 1 9175 B.105 aten::Int pnnx_8664 1 1 B.105 9177 aten::Int pnnx_8665 1 1 B.105 9178 aten::size pnnx_8666 2 1 9141 9162 9179 #9141=(1,2304,192)f32 prim::NumToTensor pnnx_8667 1 1 9179 C.179 aten::Int pnnx_8668 1 1 C.179 9181 aten::Int pnnx_8669 1 1 C.179 9182 aten::Int pnnx_8670 1 1 C.179 9183 aten::Int pnnx_8671 1 1 C.179 9184 nn.LayerNorm layers_dfe.1.residual_group.blocks.0.norm1 1 1 9141 9185 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #9141=(1,2304,192)f32 #9185=(1,2304,192)f32 prim::ListConstruct pnnx_8672 4 1 9178 1043 1283 9184 9186 prim::Constant pnnx_8674 0 1 22707 value=0 Tensor.view Tensor.view_1444 2 1 9185 9186 x.87 $input=9185 $shape=9186 #9185=(1,2304,192)f32 #x.87=(1,48,48,192)f32 aten::size pnnx_8675 2 1 x.87 22707 9188 #x.87=(1,48,48,192)f32 prim::NumToTensor pnnx_8676 1 1 9188 B1.15 aten::Int pnnx_8677 1 1 B1.15 9190 aten::size pnnx_8678 2 1 x.87 9163 9191 #x.87=(1,48,48,192)f32 prim::NumToTensor pnnx_8679 1 1 9191 9192 prim::Constant pnnx_8680 0 1 22708 value=2 aten::size pnnx_8681 2 1 x.87 22708 9193 #x.87=(1,48,48,192)f32 prim::NumToTensor pnnx_8682 1 1 9193 9194 aten::size pnnx_8683 2 1 x.87 9164 9195 #x.87=(1,48,48,192)f32 prim::NumToTensor pnnx_8684 1 1 9195 C1.15 aten::Int pnnx_8685 1 1 C1.15 9197 aten::Int pnnx_8686 1 1 C1.15 9198 aten::div pnnx_8687 3 1 9192 9160 9159 9199 aten::Int pnnx_8688 1 1 9199 9200 prim::Constant pnnx_8689 0 1 22709 value=8 prim::Constant pnnx_8690 0 1 22710 value=trunc aten::div pnnx_8691 3 1 9194 22709 22710 9201 aten::Int pnnx_8692 1 1 9201 9202 prim::Constant pnnx_8693 0 1 22711 value=8 prim::ListConstruct pnnx_8694 6 1 9190 9200 9165 9202 22711 9198 9203 prim::Constant pnnx_8696 0 1 22712 value=0 prim::Constant pnnx_8697 0 1 22713 value=1 prim::Constant pnnx_8698 0 1 22714 value=3 prim::Constant pnnx_8699 0 1 22715 value=2 prim::ListConstruct pnnx_8700 6 1 22712 22713 22714 22715 9166 9167 9205 Tensor.view Tensor.view_1445 2 1 x.87 9203 x5.45 $input=x.87 $shape=9203 #x.87=(1,48,48,192)f32 #x5.45=(1,6,8,6,8,192)f32 prim::Constant pnnx_8704 0 1 22717 value=8 prim::Constant pnnx_8705 0 1 22718 value=8 prim::ListConstruct pnnx_8706 4 1 9168 22717 22718 9197 9208 torch.permute torch.permute_2696 2 1 x5.45 9205 9206 $input=x5.45 $dims=9205 #x5.45=(1,6,8,6,8,192)f32 #9206=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_128 1 1 9206 9207 memory_format=torch.contiguous_format $input=9206 #9206=(1,6,6,8,8,192)f32 #9207=(1,6,6,8,8,192)f32 prim::Constant pnnx_8708 0 1 22719 value=-1 prim::ListConstruct pnnx_8709 3 1 22719 9169 9183 9210 prim::Constant pnnx_8711 0 1 
9212 value=1.767767e-01 prim::Constant pnnx_8712 0 1 9213 value=trunc prim::Constant pnnx_8713 0 1 9214 value=6 prim::Constant pnnx_8714 0 1 9215 value=0 prim::Constant pnnx_8715 0 1 9216 value=1 prim::Constant pnnx_8716 0 1 9217 value=2 prim::Constant pnnx_8717 0 1 9218 value=3 prim::Constant pnnx_8718 0 1 9219 value=6 prim::Constant pnnx_8719 0 1 9220 value=4 prim::Constant pnnx_8720 0 1 9221 value=-2 prim::Constant pnnx_8721 0 1 9222 value=-1 prim::Constant pnnx_8722 0 1 9223 value=64 pnnx.Attribute layers_dfe.1.residual_group.blocks.0.attn 0 1 relative_position_bias_table.87 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.87=(225,6)f32 pnnx.Attribute layers_dfe.1.residual_group.blocks.0.attn 0 1 relative_position_index.87 @relative_position_index=(64,64)i64 #relative_position_index.87=(64,64)i64 Tensor.view Tensor.view_1446 2 1 9207 9208 x_windows.87 $input=9207 $shape=9208 #9207=(1,6,6,8,8,192)f32 #x_windows.87=(36,8,8,192)f32 Tensor.view Tensor.view_1447 2 1 x_windows.87 9210 x6.15 $input=x_windows.87 $shape=9210 #x_windows.87=(36,8,8,192)f32 #x6.15=(36,64,192)f32 aten::size pnnx_8723 2 1 x6.15 9215 9231 #x6.15=(36,64,192)f32 prim::NumToTensor pnnx_8724 1 1 9231 B_.87 aten::Int pnnx_8725 1 1 B_.87 9233 aten::Int pnnx_8726 1 1 B_.87 9234 aten::size pnnx_8727 2 1 x6.15 9216 9235 #x6.15=(36,64,192)f32 prim::NumToTensor pnnx_8728 1 1 9235 N.87 aten::Int pnnx_8729 1 1 N.87 9237 aten::Int pnnx_8730 1 1 N.87 9238 aten::size pnnx_8731 2 1 x6.15 9217 9239 #x6.15=(36,64,192)f32 prim::NumToTensor pnnx_8732 1 1 9239 C.181 aten::Int pnnx_8733 1 1 C.181 9241 nn.Linear layers_dfe.1.residual_group.blocks.0.attn.qkv 1 1 x6.15 9242 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x6.15=(36,64,192)f32 #9242=(36,64,576)f32 aten::div pnnx_8734 3 1 C.181 9214 9213 9243 aten::Int pnnx_8735 1 1 9243 9244 prim::ListConstruct pnnx_8736 5 1 9234 9238 9218 9219 9244 9245 prim::Constant pnnx_8738 0 1 22720 value=2 prim::Constant pnnx_8739 0 1 22721 value=0 prim::Constant pnnx_8740 0 1 22722 value=3 prim::Constant pnnx_8741 0 1 22723 value=1 prim::ListConstruct pnnx_8742 5 1 22720 22721 22722 22723 9220 9247 Tensor.reshape Tensor.reshape_518 2 1 9242 9245 9246 $input=9242 $shape=9245 #9242=(36,64,576)f32 #9246=(36,64,3,6,32)f32 prim::Constant pnnx_8744 0 1 22724 value=0 prim::Constant pnnx_8745 0 1 22725 value=0 prim::Constant pnnx_8747 0 1 22726 value=0 prim::Constant pnnx_8748 0 1 22727 value=1 prim::Constant pnnx_8750 0 1 22728 value=0 prim::Constant pnnx_8751 0 1 22729 value=2 torch.permute torch.permute_2697 2 1 9246 9247 qkv1.15 $input=9246 $dims=9247 #9246=(36,64,3,6,32)f32 #qkv1.15=(3,36,6,64,32)f32 Tensor.select Tensor.select_776 3 1 qkv1.15 22724 22725 q.87 $input=qkv1.15 $dim=22724 $index=22725 #qkv1.15=(3,36,6,64,32)f32 #q.87=(36,6,64,32)f32 aten::mul pnnx_8753 2 1 q.87 9212 q1.15 #q.87=(36,6,64,32)f32 #q1.15=(36,6,64,32)f32 Tensor.select Tensor.select_777 3 1 qkv1.15 22726 22727 k.87 $input=qkv1.15 $dim=22726 $index=22727 #qkv1.15=(3,36,6,64,32)f32 #k.87=(36,6,64,32)f32 prim::Constant pnnx_8756 0 1 22730 value=-1 prim::ListConstruct pnnx_8757 1 1 22730 9255 Tensor.view Tensor.view_1448 2 1 relative_position_index.87 9255 9256 $input=relative_position_index.87 $shape=9255 #relative_position_index.87=(64,64)i64 #9256=(4096)i64 prim::ListConstruct pnnx_8759 1 1 9256 9257 #9256=(4096)i64 prim::Constant pnnx_8761 0 1 22731 value=64 prim::Constant pnnx_8762 0 1 22732 value=-1 prim::ListConstruct pnnx_8763 3 1 9223 22731 22732 9259 Tensor.index 
Tensor.index_368 2 1 relative_position_bias_table.87 9257 9258 $input=relative_position_bias_table.87 $expr=9257 #relative_position_bias_table.87=(225,6)f32 #9258=(4096,6)f32 prim::Constant pnnx_8765 0 1 22733 value=2 prim::Constant pnnx_8766 0 1 22734 value=0 prim::Constant pnnx_8767 0 1 22735 value=1 prim::ListConstruct pnnx_8768 3 1 22733 22734 22735 9261 Tensor.view Tensor.view_1449 2 1 9258 9259 relative_position_bias.87 $input=9258 $shape=9259 #9258=(4096,6)f32 #relative_position_bias.87=(64,64,6)f32 prim::Constant pnnx_8772 0 1 22737 value=0 torch.permute torch.permute_2698 2 1 relative_position_bias.87 9261 9262 $input=relative_position_bias.87 $dims=9261 #relative_position_bias.87=(64,64,6)f32 #9262=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_129 1 1 9262 relative_position_bias1.15 memory_format=torch.contiguous_format $input=9262 #9262=(6,64,64)f32 #relative_position_bias1.15=(6,64,64)f32 prim::Constant pnnx_8774 0 1 22738 value=1 torch.transpose torch.transpose_3061 3 1 k.87 9221 9222 9253 $input=k.87 $dim0=9221 $dim1=9222 #k.87=(36,6,64,32)f32 #9253=(36,6,32,64)f32 torch.matmul torch.matmul_2288 2 1 q1.15 9253 attn.175 $input=q1.15 $other=9253 #q1.15=(36,6,64,32)f32 #9253=(36,6,32,64)f32 #attn.175=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3302 2 1 relative_position_bias1.15 22737 9264 $input=relative_position_bias1.15 $dim=22737 #relative_position_bias1.15=(6,64,64)f32 #9264=(1,6,64,64)f32 aten::add pnnx_8775 3 1 attn.175 9264 22738 input.197 #attn.175=(36,6,64,64)f32 #9264=(1,6,64,64)f32 #input.197=(36,6,64,64)f32 nn.Softmax layers_dfe.1.residual_group.blocks.0.attn.softmax 1 1 input.197 9266 dim=-1 #input.197=(36,6,64,64)f32 #9266=(36,6,64,64)f32 nn.Dropout layers_dfe.1.residual_group.blocks.0.attn.attn_drop 1 1 9266 9267 #9266=(36,6,64,64)f32 #9267=(36,6,64,64)f32 Tensor.select Tensor.select_778 3 1 qkv1.15 22728 22729 v.87 $input=qkv1.15 $dim=22728 $index=22729 #qkv1.15=(3,36,6,64,32)f32 #v.87=(36,6,64,32)f32 prim::Constant pnnx_8777 0 1 22739 value=1 prim::Constant pnnx_8778 0 1 22740 value=2 torch.matmul torch.matmul_2289 2 1 9267 v.87 9268 $input=9267 $other=v.87 #9267=(36,6,64,64)f32 #v.87=(36,6,64,32)f32 #9268=(36,6,64,32)f32 prim::ListConstruct pnnx_8780 3 1 9233 9237 9241 9270 torch.transpose torch.transpose_3062 3 1 9268 22739 22740 9269 $input=9268 $dim0=22739 $dim1=22740 #9268=(36,6,64,32)f32 #9269=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_519 2 1 9269 9270 input1.17 $input=9269 $shape=9270 #9269=(36,64,6,32)f32 #input1.17=(36,64,192)f32 nn.Linear layers_dfe.1.residual_group.blocks.0.attn.proj 1 1 input1.17 9272 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.17=(36,64,192)f32 #9272=(36,64,192)f32 nn.Dropout layers_dfe.1.residual_group.blocks.0.attn.proj_drop 1 1 9272 9273 #9272=(36,64,192)f32 #9273=(36,64,192)f32 prim::Constant pnnx_8782 0 1 22741 value=-1 prim::Constant pnnx_8783 0 1 22742 value=8 prim::Constant pnnx_8784 0 1 22743 value=8 prim::ListConstruct pnnx_8785 4 1 22741 22742 22743 9182 9274 prim::Constant pnnx_8787 0 1 22744 value=8 prim::Constant pnnx_8788 0 1 22745 value=trunc aten::div pnnx_8789 3 1 H0.1 22744 22745 9276 aten::Int pnnx_8790 1 1 9276 9277 prim::Constant pnnx_8791 0 1 22746 value=8 prim::Constant pnnx_8792 0 1 22747 value=trunc aten::div pnnx_8793 3 1 W0.1 22746 22747 9278 aten::Int pnnx_8794 1 1 9278 9279 prim::Constant pnnx_8795 0 1 22748 value=1 prim::Constant pnnx_8796 0 1 22749 value=8 prim::Constant pnnx_8797 0 1 22750 value=8 prim::Constant pnnx_8798 0 1 22751 value=-1 
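Up to this point the graph has also closed out layers_dfe.0 itself: the (1,2304,192) sequence is transposed and viewed back to (1,192,48,48), run through the group's 3x3 convolution (layers_dfe.0.conv), flattened and transposed back to (1,2304,192), and added to an earlier (1,2304,192) tensor (1327), the skip connection around the whole residual group. The ops above then start layers_dfe.1.residual_group.blocks.0 and trace the same window attention pattern seen before: the qkv projection produces (36,64,576), which is reshaped to (36,64,3,6,32) and permuted to (3,36,6,64,32); q is scaled by 1.767767e-01 (1/sqrt(32)); the scores receive a (1,6,64,64) relative position bias gathered from the (225,6) table through the (64,64) index; softmax and dropout follow, then aggregation with v and the 192-to-192 output projection. A hedged PyTorch sketch of that attention step is below; the function name, argument names, and random weights are assumptions for illustration, not identifiers from the export.

import torch
import torch.nn.functional as F

def window_attention(x, qkv_w, qkv_b, proj_w, proj_b, bias_table, bias_index, num_heads=6):
    # x: (B_, N, C) = (36, 64, 192); bias_table: (225, 6); bias_index: (64, 64) int64
    B_, N, C = x.shape
    head_dim = C // num_heads                    # 32
    scale = head_dim ** -0.5                     # 0.17677..., the 1.767767e-01 constant in the graph
    qkv = F.linear(x, qkv_w, qkv_b)              # (36, 64, 576)
    qkv = qkv.reshape(B_, N, 3, num_heads, head_dim).permute(2, 0, 3, 1, 4)
    q, k, v = qkv[0], qkv[1], qkv[2]             # each (36, 6, 64, 32)
    attn = (q * scale) @ k.transpose(-2, -1)     # (36, 6, 64, 64)
    bias = bias_table[bias_index.view(-1)].view(N, N, -1)    # (64, 64, 6)
    attn = attn + bias.permute(2, 0, 1).contiguous().unsqueeze(0)
    attn = F.softmax(attn, dim=-1)               # the nn.Dropout nodes in the graph are omitted here
    out = (attn @ v).transpose(1, 2).reshape(B_, N, C)       # (36, 64, 192)
    return F.linear(out, proj_w, proj_b)

# Exercise the sketch with random weights at the shapes recorded in the graph:
out = window_attention(
    torch.randn(36, 64, 192),
    torch.randn(576, 192), torch.randn(576),
    torch.randn(192, 192), torch.randn(192),
    torch.randn(225, 6), torch.randint(0, 225, (64, 64)),
)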
prim::ListConstruct pnnx_8799 6 1 22748 9277 9279 22749 22750 22751 9280 prim::Constant pnnx_8801 0 1 22752 value=0 prim::Constant pnnx_8802 0 1 22753 value=1 prim::Constant pnnx_8803 0 1 22754 value=3 prim::Constant pnnx_8804 0 1 22755 value=2 prim::Constant pnnx_8805 0 1 22756 value=4 prim::Constant pnnx_8806 0 1 22757 value=5 prim::ListConstruct pnnx_8807 6 1 22752 22753 22754 22755 22756 22757 9282 Tensor.view Tensor.view_1450 2 1 9273 9274 windows.87 $input=9273 $shape=9274 #9273=(36,64,192)f32 #windows.87=(36,8,8,192)f32 Tensor.view Tensor.view_1451 2 1 windows.87 9280 x7.15 $input=windows.87 $shape=9280 #windows.87=(36,8,8,192)f32 #x7.15=(1,6,6,8,8,192)f32 prim::Constant pnnx_8811 0 1 22759 value=1 prim::Constant pnnx_8812 0 1 22760 value=-1 prim::ListConstruct pnnx_8813 4 1 22759 1040 1280 22760 9285 torch.permute torch.permute_2699 2 1 x7.15 9282 9283 $input=x7.15 $dims=9282 #x7.15=(1,6,6,8,8,192)f32 #9283=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_130 1 1 9283 9284 memory_format=torch.contiguous_format $input=9283 #9283=(1,6,8,6,8,192)f32 #9284=(1,6,8,6,8,192)f32 aten::mul pnnx_8815 2 1 H0.1 W0.1 9287 aten::Int pnnx_8816 1 1 9287 9288 prim::ListConstruct pnnx_8817 3 1 9177 9288 9181 9289 prim::Constant pnnx_8819 0 1 9291 value=None prim::Constant pnnx_8820 0 1 22761 value=1 Tensor.view Tensor.view_1452 2 1 9284 9285 x8.15 $input=9284 $shape=9285 #9284=(1,6,8,6,8,192)f32 #x8.15=(1,48,48,192)f32 Tensor.view Tensor.view_1453 2 1 x8.15 9289 x9.15 $input=x8.15 $shape=9289 #x8.15=(1,48,48,192)f32 #x9.15=(1,2304,192)f32 aten::add pnnx_8821 3 1 9141 x9.15 22761 input.199 #9141=(1,2304,192)f32 #x9.15=(1,2304,192)f32 #input.199=(1,2304,192)f32 nn.LayerNorm layers_dfe.1.residual_group.blocks.0.norm2 1 1 input.199 9293 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.199=(1,2304,192)f32 #9293=(1,2304,192)f32 nn.Linear layers_dfe.1.residual_group.blocks.0.mlp.fc1 1 1 9293 9298 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #9293=(1,2304,192)f32 #9298=(1,2304,384)f32 nn.GELU layers_dfe.1.residual_group.blocks.0.mlp.act 1 1 9298 9299 #9298=(1,2304,384)f32 #9299=(1,2304,384)f32 nn.Dropout layers_dfe.1.residual_group.blocks.0.mlp.drop 1 1 9299 9300 #9299=(1,2304,384)f32 #9300=(1,2304,384)f32 nn.Linear layers_dfe.1.residual_group.blocks.0.mlp.fc2 1 1 9300 9301 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #9300=(1,2304,384)f32 #9301=(1,2304,192)f32 nn.Dropout layers_dfe.1.residual_group.blocks.0.mlp.drop 1 1 9301 9302 #9301=(1,2304,192)f32 #9302=(1,2304,192)f32 prim::Constant pnnx_8822 0 1 9303 value=None prim::Constant pnnx_8823 0 1 22762 value=1 aten::add pnnx_8824 3 1 input.199 9302 22762 9304 #input.199=(1,2304,192)f32 #9302=(1,2304,192)f32 #9304=(1,2304,192)f32 prim::Constant pnnx_8825 0 1 9305 value=trunc prim::Constant pnnx_8826 0 1 9306 value=8 prim::Constant pnnx_8827 0 1 9307 value=0 prim::Constant pnnx_8828 0 1 9308 value=2 prim::Constant pnnx_8829 0 1 9309 value=-4 prim::Constant pnnx_8830 0 1 9310 value=1 prim::Constant pnnx_8831 0 1 9311 value=3 prim::Constant pnnx_8832 0 1 9312 value=8 prim::Constant pnnx_8833 0 1 9313 value=4 prim::Constant pnnx_8834 0 1 9314 value=5 prim::Constant pnnx_8835 0 1 9315 value=-1 prim::Constant pnnx_8836 0 1 9316 value=64 pnnx.Attribute layers_dfe.1.residual_group.blocks.1 0 1 attn_mask.45 @attn_mask=(36,64,64)f32 #attn_mask.45=(36,64,64)f32 aten::size pnnx_8837 2 1 9304 9307 9323 #9304=(1,2304,192)f32 prim::NumToTensor 
pnnx_8838 1 1 9323 B.107 aten::Int pnnx_8839 1 1 B.107 9325 aten::Int pnnx_8840 1 1 B.107 9326 aten::size pnnx_8841 2 1 9304 9308 9327 #9304=(1,2304,192)f32 prim::NumToTensor pnnx_8842 1 1 9327 C.183 aten::Int pnnx_8843 1 1 C.183 9329 aten::Int pnnx_8844 1 1 C.183 9330 aten::Int pnnx_8845 1 1 C.183 9331 aten::Int pnnx_8846 1 1 C.183 9332 nn.LayerNorm layers_dfe.1.residual_group.blocks.1.norm1 1 1 9304 9333 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #9304=(1,2304,192)f32 #9333=(1,2304,192)f32 prim::ListConstruct pnnx_8847 4 1 9326 1037 1277 9332 9334 prim::Constant pnnx_8849 0 1 22763 value=-4 prim::ListConstruct pnnx_8850 2 1 9309 22763 9336 prim::Constant pnnx_8851 0 1 22764 value=2 prim::ListConstruct pnnx_8852 2 1 9310 22764 9337 Tensor.view Tensor.view_1454 2 1 9333 9334 x.89 $input=9333 $shape=9334 #9333=(1,2304,192)f32 #x.89=(1,48,48,192)f32 prim::Constant pnnx_8854 0 1 22765 value=0 torch.roll torch.roll_2462 3 1 x.89 9336 9337 x6.17 $input=x.89 $shifts=9336 $dims=9337 #x.89=(1,48,48,192)f32 #x6.17=(1,48,48,192)f32 aten::size pnnx_8855 2 1 x6.17 22765 9339 #x6.17=(1,48,48,192)f32 prim::NumToTensor pnnx_8856 1 1 9339 B1.17 aten::Int pnnx_8857 1 1 B1.17 9341 prim::Constant pnnx_8858 0 1 22766 value=1 aten::size pnnx_8859 2 1 x6.17 22766 9342 #x6.17=(1,48,48,192)f32 prim::NumToTensor pnnx_8860 1 1 9342 9343 prim::Constant pnnx_8861 0 1 22767 value=2 aten::size pnnx_8862 2 1 x6.17 22767 9344 #x6.17=(1,48,48,192)f32 prim::NumToTensor pnnx_8863 1 1 9344 9345 aten::size pnnx_8864 2 1 x6.17 9311 9346 #x6.17=(1,48,48,192)f32 prim::NumToTensor pnnx_8865 1 1 9346 C1.17 aten::Int pnnx_8866 1 1 C1.17 9348 aten::Int pnnx_8867 1 1 C1.17 9349 aten::div pnnx_8868 3 1 9343 9306 9305 9350 aten::Int pnnx_8869 1 1 9350 9351 prim::Constant pnnx_8870 0 1 22768 value=8 prim::Constant pnnx_8871 0 1 22769 value=trunc aten::div pnnx_8872 3 1 9345 22768 22769 9352 aten::Int pnnx_8873 1 1 9352 9353 prim::Constant pnnx_8874 0 1 22770 value=8 prim::ListConstruct pnnx_8875 6 1 9341 9351 9312 9353 22770 9349 9354 prim::Constant pnnx_8877 0 1 22771 value=0 prim::Constant pnnx_8878 0 1 22772 value=1 prim::Constant pnnx_8879 0 1 22773 value=3 prim::Constant pnnx_8880 0 1 22774 value=2 prim::ListConstruct pnnx_8881 6 1 22771 22772 22773 22774 9313 9314 9356 Tensor.view Tensor.view_1455 2 1 x6.17 9354 x7.17 $input=x6.17 $shape=9354 #x6.17=(1,48,48,192)f32 #x7.17=(1,6,8,6,8,192)f32 prim::Constant pnnx_8885 0 1 22776 value=8 prim::Constant pnnx_8886 0 1 22777 value=8 prim::ListConstruct pnnx_8887 4 1 9315 22776 22777 9348 9359 torch.permute torch.permute_2700 2 1 x7.17 9356 9357 $input=x7.17 $dims=9356 #x7.17=(1,6,8,6,8,192)f32 #9357=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_131 1 1 9357 9358 memory_format=torch.contiguous_format $input=9357 #9357=(1,6,6,8,8,192)f32 #9358=(1,6,6,8,8,192)f32 prim::Constant pnnx_8889 0 1 22778 value=-1 prim::ListConstruct pnnx_8890 3 1 22778 9316 9331 9361 prim::Constant pnnx_8892 0 1 9363 value=1.767767e-01 prim::Constant pnnx_8893 0 1 9364 value=trunc prim::Constant pnnx_8894 0 1 9365 value=6 prim::Constant pnnx_8895 0 1 9366 value=0 prim::Constant pnnx_8896 0 1 9367 value=1 prim::Constant pnnx_8897 0 1 9368 value=2 prim::Constant pnnx_8898 0 1 9369 value=3 prim::Constant pnnx_8899 0 1 9370 value=6 prim::Constant pnnx_8900 0 1 9371 value=4 prim::Constant pnnx_8901 0 1 9372 value=-2 prim::Constant pnnx_8902 0 1 9373 value=-1 prim::Constant pnnx_8903 0 1 9374 value=64 pnnx.Attribute layers_dfe.1.residual_group.blocks.1.attn 0 
1 relative_position_bias_table.89 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.89=(225,6)f32 pnnx.Attribute layers_dfe.1.residual_group.blocks.1.attn 0 1 relative_position_index.89 @relative_position_index=(64,64)i64 #relative_position_index.89=(64,64)i64 Tensor.view Tensor.view_1456 2 1 9358 9359 x_windows.89 $input=9358 $shape=9359 #9358=(1,6,6,8,8,192)f32 #x_windows.89=(36,8,8,192)f32 Tensor.view Tensor.view_1457 2 1 x_windows.89 9361 x8.17 $input=x_windows.89 $shape=9361 #x_windows.89=(36,8,8,192)f32 #x8.17=(36,64,192)f32 aten::size pnnx_8904 2 1 x8.17 9366 9382 #x8.17=(36,64,192)f32 prim::NumToTensor pnnx_8905 1 1 9382 B_.89 aten::Int pnnx_8906 1 1 B_.89 9384 aten::Int pnnx_8907 1 1 B_.89 9385 aten::size pnnx_8908 2 1 x8.17 9367 9386 #x8.17=(36,64,192)f32 prim::NumToTensor pnnx_8909 1 1 9386 N.89 aten::Int pnnx_8910 1 1 N.89 9388 aten::Int pnnx_8911 1 1 N.89 9389 aten::Int pnnx_8912 1 1 N.89 9390 aten::Int pnnx_8913 1 1 N.89 9391 aten::Int pnnx_8914 1 1 N.89 9392 aten::Int pnnx_8915 1 1 N.89 9393 aten::size pnnx_8916 2 1 x8.17 9368 9394 #x8.17=(36,64,192)f32 prim::NumToTensor pnnx_8917 1 1 9394 C.185 aten::Int pnnx_8918 1 1 C.185 9396 nn.Linear layers_dfe.1.residual_group.blocks.1.attn.qkv 1 1 x8.17 9397 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x8.17=(36,64,192)f32 #9397=(36,64,576)f32 aten::div pnnx_8919 3 1 C.185 9365 9364 9398 aten::Int pnnx_8920 1 1 9398 9399 prim::ListConstruct pnnx_8921 5 1 9385 9393 9369 9370 9399 9400 prim::Constant pnnx_8923 0 1 22779 value=2 prim::Constant pnnx_8924 0 1 22780 value=0 prim::Constant pnnx_8925 0 1 22781 value=3 prim::Constant pnnx_8926 0 1 22782 value=1 prim::ListConstruct pnnx_8927 5 1 22779 22780 22781 22782 9371 9402 Tensor.reshape Tensor.reshape_520 2 1 9397 9400 9401 $input=9397 $shape=9400 #9397=(36,64,576)f32 #9401=(36,64,3,6,32)f32 prim::Constant pnnx_8929 0 1 22783 value=0 prim::Constant pnnx_8930 0 1 22784 value=0 prim::Constant pnnx_8932 0 1 22785 value=0 prim::Constant pnnx_8933 0 1 22786 value=1 prim::Constant pnnx_8935 0 1 22787 value=0 prim::Constant pnnx_8936 0 1 22788 value=2 torch.permute torch.permute_2701 2 1 9401 9402 qkv1.17 $input=9401 $dims=9402 #9401=(36,64,3,6,32)f32 #qkv1.17=(3,36,6,64,32)f32 Tensor.select Tensor.select_779 3 1 qkv1.17 22783 22784 q.89 $input=qkv1.17 $dim=22783 $index=22784 #qkv1.17=(3,36,6,64,32)f32 #q.89=(36,6,64,32)f32 aten::mul pnnx_8938 2 1 q.89 9363 q1.17 #q.89=(36,6,64,32)f32 #q1.17=(36,6,64,32)f32 Tensor.select Tensor.select_780 3 1 qkv1.17 22785 22786 k.89 $input=qkv1.17 $dim=22785 $index=22786 #qkv1.17=(3,36,6,64,32)f32 #k.89=(36,6,64,32)f32 prim::Constant pnnx_8941 0 1 22789 value=-1 prim::ListConstruct pnnx_8942 1 1 22789 9410 Tensor.view Tensor.view_1458 2 1 relative_position_index.89 9410 9411 $input=relative_position_index.89 $shape=9410 #relative_position_index.89=(64,64)i64 #9411=(4096)i64 prim::ListConstruct pnnx_8944 1 1 9411 9412 #9411=(4096)i64 prim::Constant pnnx_8946 0 1 22790 value=64 prim::Constant pnnx_8947 0 1 22791 value=-1 prim::ListConstruct pnnx_8948 3 1 9374 22790 22791 9414 Tensor.index Tensor.index_369 2 1 relative_position_bias_table.89 9412 9413 $input=relative_position_bias_table.89 $expr=9412 #relative_position_bias_table.89=(225,6)f32 #9413=(4096,6)f32 prim::Constant pnnx_8950 0 1 22792 value=2 prim::Constant pnnx_8951 0 1 22793 value=0 prim::Constant pnnx_8952 0 1 22794 value=1 prim::ListConstruct pnnx_8953 3 1 22792 22793 22794 9416 Tensor.view Tensor.view_1459 2 1 9413 9414 
relative_position_bias.89 $input=9413 $shape=9414 #9413=(4096,6)f32 #relative_position_bias.89=(64,64,6)f32 prim::Constant pnnx_8957 0 1 22796 value=0 torch.permute torch.permute_2702 2 1 relative_position_bias.89 9416 9417 $input=relative_position_bias.89 $dims=9416 #relative_position_bias.89=(64,64,6)f32 #9417=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_132 1 1 9417 relative_position_bias1.17 memory_format=torch.contiguous_format $input=9417 #9417=(6,64,64)f32 #relative_position_bias1.17=(6,64,64)f32 prim::Constant pnnx_8959 0 1 22797 value=1 torch.transpose torch.transpose_3063 3 1 k.89 9372 9373 9408 $input=k.89 $dim0=9372 $dim1=9373 #k.89=(36,6,64,32)f32 #9408=(36,6,32,64)f32 torch.matmul torch.matmul_2290 2 1 q1.17 9408 attn.179 $input=q1.17 $other=9408 #q1.17=(36,6,64,32)f32 #9408=(36,6,32,64)f32 #attn.179=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3303 2 1 relative_position_bias1.17 22796 9419 $input=relative_position_bias1.17 $dim=22796 #relative_position_bias1.17=(6,64,64)f32 #9419=(1,6,64,64)f32 aten::add pnnx_8960 3 1 attn.179 9419 22797 attn2.9 #attn.179=(36,6,64,64)f32 #9419=(1,6,64,64)f32 #attn2.9=(36,6,64,64)f32 prim::Constant pnnx_8961 0 1 22798 value=0 aten::size pnnx_8962 2 1 attn_mask.45 22798 9421 #attn_mask.45=(36,64,64)f32 prim::NumToTensor pnnx_8963 1 1 9421 other.45 aten::Int pnnx_8964 1 1 other.45 9423 prim::Constant pnnx_8965 0 1 22799 value=trunc aten::div pnnx_8966 3 1 B_.89 other.45 22799 9424 aten::Int pnnx_8967 1 1 9424 9425 prim::Constant pnnx_8968 0 1 22800 value=6 prim::ListConstruct pnnx_8969 5 1 9425 9423 22800 9392 9391 9426 prim::Constant pnnx_8971 0 1 22801 value=1 prim::Constant pnnx_8973 0 1 22802 value=0 prim::Constant pnnx_8975 0 1 22803 value=1 Tensor.view Tensor.view_1460 2 1 attn2.9 9426 9427 $input=attn2.9 $shape=9426 #attn2.9=(36,6,64,64)f32 #9427=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3304 2 1 attn_mask.45 22801 9428 $input=attn_mask.45 $dim=22801 #attn_mask.45=(36,64,64)f32 #9428=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3305 2 1 9428 22802 9429 $input=9428 $dim=22802 #9428=(36,1,64,64)f32 #9429=(1,36,1,64,64)f32 aten::add pnnx_8976 3 1 9427 9429 22803 attn3.9 #9427=(1,36,6,64,64)f32 #9429=(1,36,1,64,64)f32 #attn3.9=(1,36,6,64,64)f32 prim::Constant pnnx_8977 0 1 22804 value=-1 prim::Constant pnnx_8978 0 1 22805 value=6 prim::ListConstruct pnnx_8979 4 1 22804 22805 9390 9389 9431 Tensor.view Tensor.view_1461 2 1 attn3.9 9431 input.201 $input=attn3.9 $shape=9431 #attn3.9=(1,36,6,64,64)f32 #input.201=(36,6,64,64)f32 nn.Softmax layers_dfe.1.residual_group.blocks.1.attn.softmax 1 1 input.201 9433 dim=-1 #input.201=(36,6,64,64)f32 #9433=(36,6,64,64)f32 nn.Dropout layers_dfe.1.residual_group.blocks.1.attn.attn_drop 1 1 9433 9434 #9433=(36,6,64,64)f32 #9434=(36,6,64,64)f32 Tensor.select Tensor.select_781 3 1 qkv1.17 22787 22788 v.89 $input=qkv1.17 $dim=22787 $index=22788 #qkv1.17=(3,36,6,64,32)f32 #v.89=(36,6,64,32)f32 prim::Constant pnnx_8982 0 1 22806 value=1 prim::Constant pnnx_8983 0 1 22807 value=2 torch.matmul torch.matmul_2291 2 1 9434 v.89 9435 $input=9434 $other=v.89 #9434=(36,6,64,64)f32 #v.89=(36,6,64,32)f32 #9435=(36,6,64,32)f32 prim::ListConstruct pnnx_8985 3 1 9384 9388 9396 9437 torch.transpose torch.transpose_3064 3 1 9435 22806 22807 9436 $input=9435 $dim0=22806 $dim1=22807 #9435=(36,6,64,32)f32 #9436=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_521 2 1 9436 9437 input1.19 $input=9436 $shape=9437 #9436=(36,64,6,32)f32 #input1.19=(36,64,192)f32 nn.Linear layers_dfe.1.residual_group.blocks.1.attn.proj 1 1 
input1.19 9439 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.19=(36,64,192)f32 #9439=(36,64,192)f32 nn.Dropout layers_dfe.1.residual_group.blocks.1.attn.proj_drop 1 1 9439 9440 #9439=(36,64,192)f32 #9440=(36,64,192)f32 prim::Constant pnnx_8987 0 1 22808 value=-1 prim::Constant pnnx_8988 0 1 22809 value=8 prim::Constant pnnx_8989 0 1 22810 value=8 prim::ListConstruct pnnx_8990 4 1 22808 22809 22810 9330 9441 prim::Constant pnnx_8992 0 1 22811 value=8 prim::Constant pnnx_8993 0 1 22812 value=trunc aten::div pnnx_8994 3 1 H0.1 22811 22812 9443 aten::Int pnnx_8995 1 1 9443 9444 prim::Constant pnnx_8996 0 1 22813 value=8 prim::Constant pnnx_8997 0 1 22814 value=trunc aten::div pnnx_8998 3 1 W0.1 22813 22814 9445 aten::Int pnnx_8999 1 1 9445 9446 prim::Constant pnnx_9000 0 1 22815 value=1 prim::Constant pnnx_9001 0 1 22816 value=8 prim::Constant pnnx_9002 0 1 22817 value=8 prim::Constant pnnx_9003 0 1 22818 value=-1 prim::ListConstruct pnnx_9004 6 1 22815 9444 9446 22816 22817 22818 9447 prim::Constant pnnx_9006 0 1 22819 value=0 prim::Constant pnnx_9007 0 1 22820 value=1 prim::Constant pnnx_9008 0 1 22821 value=3 prim::Constant pnnx_9009 0 1 22822 value=2 prim::Constant pnnx_9010 0 1 22823 value=4 prim::Constant pnnx_9011 0 1 22824 value=5 prim::ListConstruct pnnx_9012 6 1 22819 22820 22821 22822 22823 22824 9449 Tensor.view Tensor.view_1462 2 1 9440 9441 windows.89 $input=9440 $shape=9441 #9440=(36,64,192)f32 #windows.89=(36,8,8,192)f32 Tensor.view Tensor.view_1463 2 1 windows.89 9447 x9.17 $input=windows.89 $shape=9447 #windows.89=(36,8,8,192)f32 #x9.17=(1,6,6,8,8,192)f32 prim::Constant pnnx_9016 0 1 22826 value=1 prim::Constant pnnx_9017 0 1 22827 value=-1 prim::ListConstruct pnnx_9018 4 1 22826 1034 1274 22827 9452 torch.permute torch.permute_2703 2 1 x9.17 9449 9450 $input=x9.17 $dims=9449 #x9.17=(1,6,6,8,8,192)f32 #9450=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_133 1 1 9450 9451 memory_format=torch.contiguous_format $input=9450 #9450=(1,6,8,6,8,192)f32 #9451=(1,6,8,6,8,192)f32 prim::Constant pnnx_9020 0 1 22828 value=4 prim::Constant pnnx_9021 0 1 22829 value=4 prim::ListConstruct pnnx_9022 2 1 22828 22829 9454 prim::Constant pnnx_9023 0 1 22830 value=1 prim::Constant pnnx_9024 0 1 22831 value=2 prim::ListConstruct pnnx_9025 2 1 22830 22831 9455 Tensor.view Tensor.view_1464 2 1 9451 9452 shifted_x.45 $input=9451 $shape=9452 #9451=(1,6,8,6,8,192)f32 #shifted_x.45=(1,48,48,192)f32 aten::mul pnnx_9027 2 1 H0.1 W0.1 9457 aten::Int pnnx_9028 1 1 9457 9458 prim::ListConstruct pnnx_9029 3 1 9325 9458 9329 9459 prim::Constant pnnx_9031 0 1 9461 value=None prim::Constant pnnx_9032 0 1 22832 value=1 torch.roll torch.roll_2463 3 1 shifted_x.45 9454 9455 x10.9 $input=shifted_x.45 $shifts=9454 $dims=9455 #shifted_x.45=(1,48,48,192)f32 #x10.9=(1,48,48,192)f32 Tensor.view Tensor.view_1465 2 1 x10.9 9459 x11.9 $input=x10.9 $shape=9459 #x10.9=(1,48,48,192)f32 #x11.9=(1,2304,192)f32 aten::add pnnx_9033 3 1 9304 x11.9 22832 input.203 #9304=(1,2304,192)f32 #x11.9=(1,2304,192)f32 #input.203=(1,2304,192)f32 nn.LayerNorm layers_dfe.1.residual_group.blocks.1.norm2 1 1 input.203 9463 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.203=(1,2304,192)f32 #9463=(1,2304,192)f32 nn.Linear layers_dfe.1.residual_group.blocks.1.mlp.fc1 1 1 9463 9468 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #9463=(1,2304,192)f32 #9468=(1,2304,384)f32 nn.GELU 
layers_dfe.1.residual_group.blocks.1.mlp.act 1 1 9468 9469 #9468=(1,2304,384)f32 #9469=(1,2304,384)f32 nn.Dropout layers_dfe.1.residual_group.blocks.1.mlp.drop 1 1 9469 9470 #9469=(1,2304,384)f32 #9470=(1,2304,384)f32 nn.Linear layers_dfe.1.residual_group.blocks.1.mlp.fc2 1 1 9470 9471 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #9470=(1,2304,384)f32 #9471=(1,2304,192)f32 nn.Dropout layers_dfe.1.residual_group.blocks.1.mlp.drop 1 1 9471 9472 #9471=(1,2304,192)f32 #9472=(1,2304,192)f32 prim::Constant pnnx_9034 0 1 9473 value=None prim::Constant pnnx_9035 0 1 22833 value=1 aten::add pnnx_9036 3 1 input.203 9472 22833 9474 #input.203=(1,2304,192)f32 #9472=(1,2304,192)f32 #9474=(1,2304,192)f32 prim::Constant pnnx_9037 0 1 9475 value=trunc prim::Constant pnnx_9038 0 1 9476 value=8 prim::Constant pnnx_9039 0 1 9477 value=0 prim::Constant pnnx_9040 0 1 9478 value=2 prim::Constant pnnx_9041 0 1 9479 value=1 prim::Constant pnnx_9042 0 1 9480 value=3 prim::Constant pnnx_9043 0 1 9481 value=8 prim::Constant pnnx_9044 0 1 9482 value=4 prim::Constant pnnx_9045 0 1 9483 value=5 prim::Constant pnnx_9046 0 1 9484 value=-1 prim::Constant pnnx_9047 0 1 9485 value=64 aten::size pnnx_9048 2 1 9474 9477 9491 #9474=(1,2304,192)f32 prim::NumToTensor pnnx_9049 1 1 9491 B.109 aten::Int pnnx_9050 1 1 B.109 9493 aten::Int pnnx_9051 1 1 B.109 9494 aten::size pnnx_9052 2 1 9474 9478 9495 #9474=(1,2304,192)f32 prim::NumToTensor pnnx_9053 1 1 9495 C.187 aten::Int pnnx_9054 1 1 C.187 9497 aten::Int pnnx_9055 1 1 C.187 9498 aten::Int pnnx_9056 1 1 C.187 9499 aten::Int pnnx_9057 1 1 C.187 9500 nn.LayerNorm layers_dfe.1.residual_group.blocks.2.norm1 1 1 9474 9501 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #9474=(1,2304,192)f32 #9501=(1,2304,192)f32 prim::ListConstruct pnnx_9058 4 1 9494 1031 1271 9500 9502 prim::Constant pnnx_9060 0 1 22834 value=0 Tensor.view Tensor.view_1466 2 1 9501 9502 x.91 $input=9501 $shape=9502 #9501=(1,2304,192)f32 #x.91=(1,48,48,192)f32 aten::size pnnx_9061 2 1 x.91 22834 9504 #x.91=(1,48,48,192)f32 prim::NumToTensor pnnx_9062 1 1 9504 B1.19 aten::Int pnnx_9063 1 1 B1.19 9506 aten::size pnnx_9064 2 1 x.91 9479 9507 #x.91=(1,48,48,192)f32 prim::NumToTensor pnnx_9065 1 1 9507 9508 prim::Constant pnnx_9066 0 1 22835 value=2 aten::size pnnx_9067 2 1 x.91 22835 9509 #x.91=(1,48,48,192)f32 prim::NumToTensor pnnx_9068 1 1 9509 9510 aten::size pnnx_9069 2 1 x.91 9480 9511 #x.91=(1,48,48,192)f32 prim::NumToTensor pnnx_9070 1 1 9511 C1.19 aten::Int pnnx_9071 1 1 C1.19 9513 aten::Int pnnx_9072 1 1 C1.19 9514 aten::div pnnx_9073 3 1 9508 9476 9475 9515 aten::Int pnnx_9074 1 1 9515 9516 prim::Constant pnnx_9075 0 1 22836 value=8 prim::Constant pnnx_9076 0 1 22837 value=trunc aten::div pnnx_9077 3 1 9510 22836 22837 9517 aten::Int pnnx_9078 1 1 9517 9518 prim::Constant pnnx_9079 0 1 22838 value=8 prim::ListConstruct pnnx_9080 6 1 9506 9516 9481 9518 22838 9514 9519 prim::Constant pnnx_9082 0 1 22839 value=0 prim::Constant pnnx_9083 0 1 22840 value=1 prim::Constant pnnx_9084 0 1 22841 value=3 prim::Constant pnnx_9085 0 1 22842 value=2 prim::ListConstruct pnnx_9086 6 1 22839 22840 22841 22842 9482 9483 9521 Tensor.view Tensor.view_1467 2 1 x.91 9519 x5.47 $input=x.91 $shape=9519 #x.91=(1,48,48,192)f32 #x5.47=(1,6,8,6,8,192)f32 prim::Constant pnnx_9090 0 1 22844 value=8 prim::Constant pnnx_9091 0 1 22845 value=8 prim::ListConstruct pnnx_9092 4 1 9484 22844 22845 9513 9524 torch.permute torch.permute_2704 2 1 x5.47 9521 9522 
$input=x5.47 $dims=9521 #x5.47=(1,6,8,6,8,192)f32 #9522=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_134 1 1 9522 9523 memory_format=torch.contiguous_format $input=9522 #9522=(1,6,6,8,8,192)f32 #9523=(1,6,6,8,8,192)f32 prim::Constant pnnx_9094 0 1 22846 value=-1 prim::ListConstruct pnnx_9095 3 1 22846 9485 9499 9526 prim::Constant pnnx_9097 0 1 9528 value=1.767767e-01 prim::Constant pnnx_9098 0 1 9529 value=trunc prim::Constant pnnx_9099 0 1 9530 value=6 prim::Constant pnnx_9100 0 1 9531 value=0 prim::Constant pnnx_9101 0 1 9532 value=1 prim::Constant pnnx_9102 0 1 9533 value=2 prim::Constant pnnx_9103 0 1 9534 value=3 prim::Constant pnnx_9104 0 1 9535 value=6 prim::Constant pnnx_9105 0 1 9536 value=4 prim::Constant pnnx_9106 0 1 9537 value=-2 prim::Constant pnnx_9107 0 1 9538 value=-1 prim::Constant pnnx_9108 0 1 9539 value=64 pnnx.Attribute layers_dfe.1.residual_group.blocks.2.attn 0 1 relative_position_bias_table.91 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.91=(225,6)f32 pnnx.Attribute layers_dfe.1.residual_group.blocks.2.attn 0 1 relative_position_index.91 @relative_position_index=(64,64)i64 #relative_position_index.91=(64,64)i64 Tensor.view Tensor.view_1468 2 1 9523 9524 x_windows.91 $input=9523 $shape=9524 #9523=(1,6,6,8,8,192)f32 #x_windows.91=(36,8,8,192)f32 Tensor.view Tensor.view_1469 2 1 x_windows.91 9526 x6.19 $input=x_windows.91 $shape=9526 #x_windows.91=(36,8,8,192)f32 #x6.19=(36,64,192)f32 aten::size pnnx_9109 2 1 x6.19 9531 9547 #x6.19=(36,64,192)f32 prim::NumToTensor pnnx_9110 1 1 9547 B_.91 aten::Int pnnx_9111 1 1 B_.91 9549 aten::Int pnnx_9112 1 1 B_.91 9550 aten::size pnnx_9113 2 1 x6.19 9532 9551 #x6.19=(36,64,192)f32 prim::NumToTensor pnnx_9114 1 1 9551 N.91 aten::Int pnnx_9115 1 1 N.91 9553 aten::Int pnnx_9116 1 1 N.91 9554 aten::size pnnx_9117 2 1 x6.19 9533 9555 #x6.19=(36,64,192)f32 prim::NumToTensor pnnx_9118 1 1 9555 C.189 aten::Int pnnx_9119 1 1 C.189 9557 nn.Linear layers_dfe.1.residual_group.blocks.2.attn.qkv 1 1 x6.19 9558 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x6.19=(36,64,192)f32 #9558=(36,64,576)f32 aten::div pnnx_9120 3 1 C.189 9530 9529 9559 aten::Int pnnx_9121 1 1 9559 9560 prim::ListConstruct pnnx_9122 5 1 9550 9554 9534 9535 9560 9561 prim::Constant pnnx_9124 0 1 22847 value=2 prim::Constant pnnx_9125 0 1 22848 value=0 prim::Constant pnnx_9126 0 1 22849 value=3 prim::Constant pnnx_9127 0 1 22850 value=1 prim::ListConstruct pnnx_9128 5 1 22847 22848 22849 22850 9536 9563 Tensor.reshape Tensor.reshape_522 2 1 9558 9561 9562 $input=9558 $shape=9561 #9558=(36,64,576)f32 #9562=(36,64,3,6,32)f32 prim::Constant pnnx_9130 0 1 22851 value=0 prim::Constant pnnx_9131 0 1 22852 value=0 prim::Constant pnnx_9133 0 1 22853 value=0 prim::Constant pnnx_9134 0 1 22854 value=1 prim::Constant pnnx_9136 0 1 22855 value=0 prim::Constant pnnx_9137 0 1 22856 value=2 torch.permute torch.permute_2705 2 1 9562 9563 qkv1.19 $input=9562 $dims=9563 #9562=(36,64,3,6,32)f32 #qkv1.19=(3,36,6,64,32)f32 Tensor.select Tensor.select_782 3 1 qkv1.19 22851 22852 q.91 $input=qkv1.19 $dim=22851 $index=22852 #qkv1.19=(3,36,6,64,32)f32 #q.91=(36,6,64,32)f32 aten::mul pnnx_9139 2 1 q.91 9528 q1.19 #q.91=(36,6,64,32)f32 #q1.19=(36,6,64,32)f32 Tensor.select Tensor.select_783 3 1 qkv1.19 22853 22854 k.91 $input=qkv1.19 $dim=22853 $index=22854 #qkv1.19=(3,36,6,64,32)f32 #k.91=(36,6,64,32)f32 prim::Constant pnnx_9142 0 1 22857 value=-1 prim::ListConstruct pnnx_9143 1 1 22857 9571 Tensor.view Tensor.view_1470 2 1 
relative_position_index.91 9571 9572 $input=relative_position_index.91 $shape=9571 #relative_position_index.91=(64,64)i64 #9572=(4096)i64 prim::ListConstruct pnnx_9145 1 1 9572 9573 #9572=(4096)i64 prim::Constant pnnx_9147 0 1 22858 value=64 prim::Constant pnnx_9148 0 1 22859 value=-1 prim::ListConstruct pnnx_9149 3 1 9539 22858 22859 9575 Tensor.index Tensor.index_370 2 1 relative_position_bias_table.91 9573 9574 $input=relative_position_bias_table.91 $expr=9573 #relative_position_bias_table.91=(225,6)f32 #9574=(4096,6)f32 prim::Constant pnnx_9151 0 1 22860 value=2 prim::Constant pnnx_9152 0 1 22861 value=0 prim::Constant pnnx_9153 0 1 22862 value=1 prim::ListConstruct pnnx_9154 3 1 22860 22861 22862 9577 Tensor.view Tensor.view_1471 2 1 9574 9575 relative_position_bias.91 $input=9574 $shape=9575 #9574=(4096,6)f32 #relative_position_bias.91=(64,64,6)f32 prim::Constant pnnx_9158 0 1 22864 value=0 torch.permute torch.permute_2706 2 1 relative_position_bias.91 9577 9578 $input=relative_position_bias.91 $dims=9577 #relative_position_bias.91=(64,64,6)f32 #9578=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_135 1 1 9578 relative_position_bias1.19 memory_format=torch.contiguous_format $input=9578 #9578=(6,64,64)f32 #relative_position_bias1.19=(6,64,64)f32 prim::Constant pnnx_9160 0 1 22865 value=1 torch.transpose torch.transpose_3065 3 1 k.91 9537 9538 9569 $input=k.91 $dim0=9537 $dim1=9538 #k.91=(36,6,64,32)f32 #9569=(36,6,32,64)f32 torch.matmul torch.matmul_2292 2 1 q1.19 9569 attn.183 $input=q1.19 $other=9569 #q1.19=(36,6,64,32)f32 #9569=(36,6,32,64)f32 #attn.183=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3306 2 1 relative_position_bias1.19 22864 9580 $input=relative_position_bias1.19 $dim=22864 #relative_position_bias1.19=(6,64,64)f32 #9580=(1,6,64,64)f32 aten::add pnnx_9161 3 1 attn.183 9580 22865 input.205 #attn.183=(36,6,64,64)f32 #9580=(1,6,64,64)f32 #input.205=(36,6,64,64)f32 nn.Softmax layers_dfe.1.residual_group.blocks.2.attn.softmax 1 1 input.205 9582 dim=-1 #input.205=(36,6,64,64)f32 #9582=(36,6,64,64)f32 nn.Dropout layers_dfe.1.residual_group.blocks.2.attn.attn_drop 1 1 9582 9583 #9582=(36,6,64,64)f32 #9583=(36,6,64,64)f32 Tensor.select Tensor.select_784 3 1 qkv1.19 22855 22856 v.91 $input=qkv1.19 $dim=22855 $index=22856 #qkv1.19=(3,36,6,64,32)f32 #v.91=(36,6,64,32)f32 prim::Constant pnnx_9163 0 1 22866 value=1 prim::Constant pnnx_9164 0 1 22867 value=2 torch.matmul torch.matmul_2293 2 1 9583 v.91 9584 $input=9583 $other=v.91 #9583=(36,6,64,64)f32 #v.91=(36,6,64,32)f32 #9584=(36,6,64,32)f32 prim::ListConstruct pnnx_9166 3 1 9549 9553 9557 9586 torch.transpose torch.transpose_3066 3 1 9584 22866 22867 9585 $input=9584 $dim0=22866 $dim1=22867 #9584=(36,6,64,32)f32 #9585=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_523 2 1 9585 9586 input1.21 $input=9585 $shape=9586 #9585=(36,64,6,32)f32 #input1.21=(36,64,192)f32 nn.Linear layers_dfe.1.residual_group.blocks.2.attn.proj 1 1 input1.21 9588 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.21=(36,64,192)f32 #9588=(36,64,192)f32 nn.Dropout layers_dfe.1.residual_group.blocks.2.attn.proj_drop 1 1 9588 9589 #9588=(36,64,192)f32 #9589=(36,64,192)f32 prim::Constant pnnx_9168 0 1 22868 value=-1 prim::Constant pnnx_9169 0 1 22869 value=8 prim::Constant pnnx_9170 0 1 22870 value=8 prim::ListConstruct pnnx_9171 4 1 22868 22869 22870 9498 9590 prim::Constant pnnx_9173 0 1 22871 value=8 prim::Constant pnnx_9174 0 1 22872 value=trunc aten::div pnnx_9175 3 1 H0.1 22871 22872 9592 aten::Int pnnx_9176 1 1 
9592 9593 prim::Constant pnnx_9177 0 1 22873 value=8 prim::Constant pnnx_9178 0 1 22874 value=trunc aten::div pnnx_9179 3 1 W0.1 22873 22874 9594 aten::Int pnnx_9180 1 1 9594 9595 prim::Constant pnnx_9181 0 1 22875 value=1 prim::Constant pnnx_9182 0 1 22876 value=8 prim::Constant pnnx_9183 0 1 22877 value=8 prim::Constant pnnx_9184 0 1 22878 value=-1 prim::ListConstruct pnnx_9185 6 1 22875 9593 9595 22876 22877 22878 9596 prim::Constant pnnx_9187 0 1 22879 value=0 prim::Constant pnnx_9188 0 1 22880 value=1 prim::Constant pnnx_9189 0 1 22881 value=3 prim::Constant pnnx_9190 0 1 22882 value=2 prim::Constant pnnx_9191 0 1 22883 value=4 prim::Constant pnnx_9192 0 1 22884 value=5 prim::ListConstruct pnnx_9193 6 1 22879 22880 22881 22882 22883 22884 9598 Tensor.view Tensor.view_1472 2 1 9589 9590 windows.91 $input=9589 $shape=9590 #9589=(36,64,192)f32 #windows.91=(36,8,8,192)f32 Tensor.view Tensor.view_1473 2 1 windows.91 9596 x7.19 $input=windows.91 $shape=9596 #windows.91=(36,8,8,192)f32 #x7.19=(1,6,6,8,8,192)f32 prim::Constant pnnx_9197 0 1 22886 value=1 prim::Constant pnnx_9198 0 1 22887 value=-1 prim::ListConstruct pnnx_9199 4 1 22886 1028 1268 22887 9601 torch.permute torch.permute_2707 2 1 x7.19 9598 9599 $input=x7.19 $dims=9598 #x7.19=(1,6,6,8,8,192)f32 #9599=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_136 1 1 9599 9600 memory_format=torch.contiguous_format $input=9599 #9599=(1,6,8,6,8,192)f32 #9600=(1,6,8,6,8,192)f32 aten::mul pnnx_9201 2 1 H0.1 W0.1 9603 aten::Int pnnx_9202 1 1 9603 9604 prim::ListConstruct pnnx_9203 3 1 9493 9604 9497 9605 prim::Constant pnnx_9205 0 1 9607 value=None prim::Constant pnnx_9206 0 1 22888 value=1 Tensor.view Tensor.view_1474 2 1 9600 9601 x8.19 $input=9600 $shape=9601 #9600=(1,6,8,6,8,192)f32 #x8.19=(1,48,48,192)f32 Tensor.view Tensor.view_1475 2 1 x8.19 9605 x9.19 $input=x8.19 $shape=9605 #x8.19=(1,48,48,192)f32 #x9.19=(1,2304,192)f32 aten::add pnnx_9207 3 1 9474 x9.19 22888 input.207 #9474=(1,2304,192)f32 #x9.19=(1,2304,192)f32 #input.207=(1,2304,192)f32 nn.LayerNorm layers_dfe.1.residual_group.blocks.2.norm2 1 1 input.207 9609 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.207=(1,2304,192)f32 #9609=(1,2304,192)f32 nn.Linear layers_dfe.1.residual_group.blocks.2.mlp.fc1 1 1 9609 9614 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #9609=(1,2304,192)f32 #9614=(1,2304,384)f32 nn.GELU layers_dfe.1.residual_group.blocks.2.mlp.act 1 1 9614 9615 #9614=(1,2304,384)f32 #9615=(1,2304,384)f32 nn.Dropout layers_dfe.1.residual_group.blocks.2.mlp.drop 1 1 9615 9616 #9615=(1,2304,384)f32 #9616=(1,2304,384)f32 nn.Linear layers_dfe.1.residual_group.blocks.2.mlp.fc2 1 1 9616 9617 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #9616=(1,2304,384)f32 #9617=(1,2304,192)f32 nn.Dropout layers_dfe.1.residual_group.blocks.2.mlp.drop 1 1 9617 9618 #9617=(1,2304,192)f32 #9618=(1,2304,192)f32 prim::Constant pnnx_9208 0 1 9619 value=None prim::Constant pnnx_9209 0 1 22889 value=1 aten::add pnnx_9210 3 1 input.207 9618 22889 9620 #input.207=(1,2304,192)f32 #9618=(1,2304,192)f32 #9620=(1,2304,192)f32 prim::Constant pnnx_9211 0 1 9621 value=trunc prim::Constant pnnx_9212 0 1 9622 value=8 prim::Constant pnnx_9213 0 1 9623 value=0 prim::Constant pnnx_9214 0 1 9624 value=2 prim::Constant pnnx_9215 0 1 9625 value=-4 prim::Constant pnnx_9216 0 1 9626 value=1 prim::Constant pnnx_9217 0 1 9627 value=3 prim::Constant pnnx_9218 0 1 9628 value=8 prim::Constant 
pnnx_9219 0 1 9629 value=4 prim::Constant pnnx_9220 0 1 9630 value=5 prim::Constant pnnx_9221 0 1 9631 value=-1 prim::Constant pnnx_9222 0 1 9632 value=64 pnnx.Attribute layers_dfe.1.residual_group.blocks.3 0 1 attn_mask.47 @attn_mask=(36,64,64)f32 #attn_mask.47=(36,64,64)f32 aten::size pnnx_9223 2 1 9620 9623 9639 #9620=(1,2304,192)f32 prim::NumToTensor pnnx_9224 1 1 9639 B.111 aten::Int pnnx_9225 1 1 B.111 9641 aten::Int pnnx_9226 1 1 B.111 9642 aten::size pnnx_9227 2 1 9620 9624 9643 #9620=(1,2304,192)f32 prim::NumToTensor pnnx_9228 1 1 9643 C.191 aten::Int pnnx_9229 1 1 C.191 9645 aten::Int pnnx_9230 1 1 C.191 9646 aten::Int pnnx_9231 1 1 C.191 9647 aten::Int pnnx_9232 1 1 C.191 9648 nn.LayerNorm layers_dfe.1.residual_group.blocks.3.norm1 1 1 9620 9649 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #9620=(1,2304,192)f32 #9649=(1,2304,192)f32 prim::ListConstruct pnnx_9233 4 1 9642 1025 1265 9648 9650 prim::Constant pnnx_9235 0 1 22890 value=-4 prim::ListConstruct pnnx_9236 2 1 9625 22890 9652 prim::Constant pnnx_9237 0 1 22891 value=2 prim::ListConstruct pnnx_9238 2 1 9626 22891 9653 Tensor.view Tensor.view_1476 2 1 9649 9650 x.93 $input=9649 $shape=9650 #9649=(1,2304,192)f32 #x.93=(1,48,48,192)f32 prim::Constant pnnx_9240 0 1 22892 value=0 torch.roll torch.roll_2464 3 1 x.93 9652 9653 x6.21 $input=x.93 $shifts=9652 $dims=9653 #x.93=(1,48,48,192)f32 #x6.21=(1,48,48,192)f32 aten::size pnnx_9241 2 1 x6.21 22892 9655 #x6.21=(1,48,48,192)f32 prim::NumToTensor pnnx_9242 1 1 9655 B1.21 aten::Int pnnx_9243 1 1 B1.21 9657 prim::Constant pnnx_9244 0 1 22893 value=1 aten::size pnnx_9245 2 1 x6.21 22893 9658 #x6.21=(1,48,48,192)f32 prim::NumToTensor pnnx_9246 1 1 9658 9659 prim::Constant pnnx_9247 0 1 22894 value=2 aten::size pnnx_9248 2 1 x6.21 22894 9660 #x6.21=(1,48,48,192)f32 prim::NumToTensor pnnx_9249 1 1 9660 9661 aten::size pnnx_9250 2 1 x6.21 9627 9662 #x6.21=(1,48,48,192)f32 prim::NumToTensor pnnx_9251 1 1 9662 C1.21 aten::Int pnnx_9252 1 1 C1.21 9664 aten::Int pnnx_9253 1 1 C1.21 9665 aten::div pnnx_9254 3 1 9659 9622 9621 9666 aten::Int pnnx_9255 1 1 9666 9667 prim::Constant pnnx_9256 0 1 22895 value=8 prim::Constant pnnx_9257 0 1 22896 value=trunc aten::div pnnx_9258 3 1 9661 22895 22896 9668 aten::Int pnnx_9259 1 1 9668 9669 prim::Constant pnnx_9260 0 1 22897 value=8 prim::ListConstruct pnnx_9261 6 1 9657 9667 9628 9669 22897 9665 9670 prim::Constant pnnx_9263 0 1 22898 value=0 prim::Constant pnnx_9264 0 1 22899 value=1 prim::Constant pnnx_9265 0 1 22900 value=3 prim::Constant pnnx_9266 0 1 22901 value=2 prim::ListConstruct pnnx_9267 6 1 22898 22899 22900 22901 9629 9630 9672 Tensor.view Tensor.view_1477 2 1 x6.21 9670 x7.21 $input=x6.21 $shape=9670 #x6.21=(1,48,48,192)f32 #x7.21=(1,6,8,6,8,192)f32 prim::Constant pnnx_9271 0 1 22903 value=8 prim::Constant pnnx_9272 0 1 22904 value=8 prim::ListConstruct pnnx_9273 4 1 9631 22903 22904 9664 9675 torch.permute torch.permute_2708 2 1 x7.21 9672 9673 $input=x7.21 $dims=9672 #x7.21=(1,6,8,6,8,192)f32 #9673=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_137 1 1 9673 9674 memory_format=torch.contiguous_format $input=9673 #9673=(1,6,6,8,8,192)f32 #9674=(1,6,6,8,8,192)f32 prim::Constant pnnx_9275 0 1 22905 value=-1 prim::ListConstruct pnnx_9276 3 1 22905 9632 9647 9677 prim::Constant pnnx_9278 0 1 9679 value=1.767767e-01 prim::Constant pnnx_9279 0 1 9680 value=trunc prim::Constant pnnx_9280 0 1 9681 value=6 prim::Constant pnnx_9281 0 1 9682 value=0 prim::Constant pnnx_9282 0 1 9683 
value=1 prim::Constant pnnx_9283 0 1 9684 value=2 prim::Constant pnnx_9284 0 1 9685 value=3 prim::Constant pnnx_9285 0 1 9686 value=6 prim::Constant pnnx_9286 0 1 9687 value=4 prim::Constant pnnx_9287 0 1 9688 value=-2 prim::Constant pnnx_9288 0 1 9689 value=-1 prim::Constant pnnx_9289 0 1 9690 value=64 pnnx.Attribute layers_dfe.1.residual_group.blocks.3.attn 0 1 relative_position_bias_table.93 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.93=(225,6)f32 pnnx.Attribute layers_dfe.1.residual_group.blocks.3.attn 0 1 relative_position_index.93 @relative_position_index=(64,64)i64 #relative_position_index.93=(64,64)i64 Tensor.view Tensor.view_1478 2 1 9674 9675 x_windows.93 $input=9674 $shape=9675 #9674=(1,6,6,8,8,192)f32 #x_windows.93=(36,8,8,192)f32 Tensor.view Tensor.view_1479 2 1 x_windows.93 9677 x8.21 $input=x_windows.93 $shape=9677 #x_windows.93=(36,8,8,192)f32 #x8.21=(36,64,192)f32 aten::size pnnx_9290 2 1 x8.21 9682 9698 #x8.21=(36,64,192)f32 prim::NumToTensor pnnx_9291 1 1 9698 B_.93 aten::Int pnnx_9292 1 1 B_.93 9700 aten::Int pnnx_9293 1 1 B_.93 9701 aten::size pnnx_9294 2 1 x8.21 9683 9702 #x8.21=(36,64,192)f32 prim::NumToTensor pnnx_9295 1 1 9702 N.93 aten::Int pnnx_9296 1 1 N.93 9704 aten::Int pnnx_9297 1 1 N.93 9705 aten::Int pnnx_9298 1 1 N.93 9706 aten::Int pnnx_9299 1 1 N.93 9707 aten::Int pnnx_9300 1 1 N.93 9708 aten::Int pnnx_9301 1 1 N.93 9709 aten::size pnnx_9302 2 1 x8.21 9684 9710 #x8.21=(36,64,192)f32 prim::NumToTensor pnnx_9303 1 1 9710 C.193 aten::Int pnnx_9304 1 1 C.193 9712 nn.Linear layers_dfe.1.residual_group.blocks.3.attn.qkv 1 1 x8.21 9713 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x8.21=(36,64,192)f32 #9713=(36,64,576)f32 aten::div pnnx_9305 3 1 C.193 9681 9680 9714 aten::Int pnnx_9306 1 1 9714 9715 prim::ListConstruct pnnx_9307 5 1 9701 9709 9685 9686 9715 9716 prim::Constant pnnx_9309 0 1 22906 value=2 prim::Constant pnnx_9310 0 1 22907 value=0 prim::Constant pnnx_9311 0 1 22908 value=3 prim::Constant pnnx_9312 0 1 22909 value=1 prim::ListConstruct pnnx_9313 5 1 22906 22907 22908 22909 9687 9718 Tensor.reshape Tensor.reshape_524 2 1 9713 9716 9717 $input=9713 $shape=9716 #9713=(36,64,576)f32 #9717=(36,64,3,6,32)f32 prim::Constant pnnx_9315 0 1 22910 value=0 prim::Constant pnnx_9316 0 1 22911 value=0 prim::Constant pnnx_9318 0 1 22912 value=0 prim::Constant pnnx_9319 0 1 22913 value=1 prim::Constant pnnx_9321 0 1 22914 value=0 prim::Constant pnnx_9322 0 1 22915 value=2 torch.permute torch.permute_2709 2 1 9717 9718 qkv1.21 $input=9717 $dims=9718 #9717=(36,64,3,6,32)f32 #qkv1.21=(3,36,6,64,32)f32 Tensor.select Tensor.select_785 3 1 qkv1.21 22910 22911 q.93 $input=qkv1.21 $dim=22910 $index=22911 #qkv1.21=(3,36,6,64,32)f32 #q.93=(36,6,64,32)f32 aten::mul pnnx_9324 2 1 q.93 9679 q1.21 #q.93=(36,6,64,32)f32 #q1.21=(36,6,64,32)f32 Tensor.select Tensor.select_786 3 1 qkv1.21 22912 22913 k.93 $input=qkv1.21 $dim=22912 $index=22913 #qkv1.21=(3,36,6,64,32)f32 #k.93=(36,6,64,32)f32 prim::Constant pnnx_9327 0 1 22916 value=-1 prim::ListConstruct pnnx_9328 1 1 22916 9726 Tensor.view Tensor.view_1480 2 1 relative_position_index.93 9726 9727 $input=relative_position_index.93 $shape=9726 #relative_position_index.93=(64,64)i64 #9727=(4096)i64 prim::ListConstruct pnnx_9330 1 1 9727 9728 #9727=(4096)i64 prim::Constant pnnx_9332 0 1 22917 value=64 prim::Constant pnnx_9333 0 1 22918 value=-1 prim::ListConstruct pnnx_9334 3 1 9690 22917 22918 9730 Tensor.index Tensor.index_371 2 1 relative_position_bias_table.93 9728 
9729 $input=relative_position_bias_table.93 $expr=9728 #relative_position_bias_table.93=(225,6)f32 #9729=(4096,6)f32 prim::Constant pnnx_9336 0 1 22919 value=2 prim::Constant pnnx_9337 0 1 22920 value=0 prim::Constant pnnx_9338 0 1 22921 value=1 prim::ListConstruct pnnx_9339 3 1 22919 22920 22921 9732 Tensor.view Tensor.view_1481 2 1 9729 9730 relative_position_bias.93 $input=9729 $shape=9730 #9729=(4096,6)f32 #relative_position_bias.93=(64,64,6)f32 prim::Constant pnnx_9343 0 1 22923 value=0 torch.permute torch.permute_2710 2 1 relative_position_bias.93 9732 9733 $input=relative_position_bias.93 $dims=9732 #relative_position_bias.93=(64,64,6)f32 #9733=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_138 1 1 9733 relative_position_bias1.21 memory_format=torch.contiguous_format $input=9733 #9733=(6,64,64)f32 #relative_position_bias1.21=(6,64,64)f32 prim::Constant pnnx_9345 0 1 22924 value=1 torch.transpose torch.transpose_3067 3 1 k.93 9688 9689 9724 $input=k.93 $dim0=9688 $dim1=9689 #k.93=(36,6,64,32)f32 #9724=(36,6,32,64)f32 torch.matmul torch.matmul_2294 2 1 q1.21 9724 attn.187 $input=q1.21 $other=9724 #q1.21=(36,6,64,32)f32 #9724=(36,6,32,64)f32 #attn.187=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3307 2 1 relative_position_bias1.21 22923 9735 $input=relative_position_bias1.21 $dim=22923 #relative_position_bias1.21=(6,64,64)f32 #9735=(1,6,64,64)f32 aten::add pnnx_9346 3 1 attn.187 9735 22924 attn2.11 #attn.187=(36,6,64,64)f32 #9735=(1,6,64,64)f32 #attn2.11=(36,6,64,64)f32 prim::Constant pnnx_9347 0 1 22925 value=0 aten::size pnnx_9348 2 1 attn_mask.47 22925 9737 #attn_mask.47=(36,64,64)f32 prim::NumToTensor pnnx_9349 1 1 9737 other.47 aten::Int pnnx_9350 1 1 other.47 9739 prim::Constant pnnx_9351 0 1 22926 value=trunc aten::div pnnx_9352 3 1 B_.93 other.47 22926 9740 aten::Int pnnx_9353 1 1 9740 9741 prim::Constant pnnx_9354 0 1 22927 value=6 prim::ListConstruct pnnx_9355 5 1 9741 9739 22927 9708 9707 9742 prim::Constant pnnx_9357 0 1 22928 value=1 prim::Constant pnnx_9359 0 1 22929 value=0 prim::Constant pnnx_9361 0 1 22930 value=1 Tensor.view Tensor.view_1482 2 1 attn2.11 9742 9743 $input=attn2.11 $shape=9742 #attn2.11=(36,6,64,64)f32 #9743=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3308 2 1 attn_mask.47 22928 9744 $input=attn_mask.47 $dim=22928 #attn_mask.47=(36,64,64)f32 #9744=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3309 2 1 9744 22929 9745 $input=9744 $dim=22929 #9744=(36,1,64,64)f32 #9745=(1,36,1,64,64)f32 aten::add pnnx_9362 3 1 9743 9745 22930 attn3.11 #9743=(1,36,6,64,64)f32 #9745=(1,36,1,64,64)f32 #attn3.11=(1,36,6,64,64)f32 prim::Constant pnnx_9363 0 1 22931 value=-1 prim::Constant pnnx_9364 0 1 22932 value=6 prim::ListConstruct pnnx_9365 4 1 22931 22932 9706 9705 9747 Tensor.view Tensor.view_1483 2 1 attn3.11 9747 input.209 $input=attn3.11 $shape=9747 #attn3.11=(1,36,6,64,64)f32 #input.209=(36,6,64,64)f32 nn.Softmax layers_dfe.1.residual_group.blocks.3.attn.softmax 1 1 input.209 9749 dim=-1 #input.209=(36,6,64,64)f32 #9749=(36,6,64,64)f32 nn.Dropout layers_dfe.1.residual_group.blocks.3.attn.attn_drop 1 1 9749 9750 #9749=(36,6,64,64)f32 #9750=(36,6,64,64)f32 Tensor.select Tensor.select_787 3 1 qkv1.21 22914 22915 v.93 $input=qkv1.21 $dim=22914 $index=22915 #qkv1.21=(3,36,6,64,32)f32 #v.93=(36,6,64,32)f32 prim::Constant pnnx_9368 0 1 22933 value=1 prim::Constant pnnx_9369 0 1 22934 value=2 torch.matmul torch.matmul_2295 2 1 9750 v.93 9751 $input=9750 $other=v.93 #9750=(36,6,64,64)f32 #v.93=(36,6,64,32)f32 #9751=(36,6,64,32)f32 prim::ListConstruct pnnx_9371 3 
1 9700 9704 9712 9753 torch.transpose torch.transpose_3068 3 1 9751 22933 22934 9752 $input=9751 $dim0=22933 $dim1=22934 #9751=(36,6,64,32)f32 #9752=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_525 2 1 9752 9753 input1.23 $input=9752 $shape=9753 #9752=(36,64,6,32)f32 #input1.23=(36,64,192)f32 nn.Linear layers_dfe.1.residual_group.blocks.3.attn.proj 1 1 input1.23 9755 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.23=(36,64,192)f32 #9755=(36,64,192)f32 nn.Dropout layers_dfe.1.residual_group.blocks.3.attn.proj_drop 1 1 9755 9756 #9755=(36,64,192)f32 #9756=(36,64,192)f32 prim::Constant pnnx_9373 0 1 22935 value=-1 prim::Constant pnnx_9374 0 1 22936 value=8 prim::Constant pnnx_9375 0 1 22937 value=8 prim::ListConstruct pnnx_9376 4 1 22935 22936 22937 9646 9757 prim::Constant pnnx_9378 0 1 22938 value=8 prim::Constant pnnx_9379 0 1 22939 value=trunc aten::div pnnx_9380 3 1 H0.1 22938 22939 9759 aten::Int pnnx_9381 1 1 9759 9760 prim::Constant pnnx_9382 0 1 22940 value=8 prim::Constant pnnx_9383 0 1 22941 value=trunc aten::div pnnx_9384 3 1 W0.1 22940 22941 9761 aten::Int pnnx_9385 1 1 9761 9762 prim::Constant pnnx_9386 0 1 22942 value=1 prim::Constant pnnx_9387 0 1 22943 value=8 prim::Constant pnnx_9388 0 1 22944 value=8 prim::Constant pnnx_9389 0 1 22945 value=-1 prim::ListConstruct pnnx_9390 6 1 22942 9760 9762 22943 22944 22945 9763 prim::Constant pnnx_9392 0 1 22946 value=0 prim::Constant pnnx_9393 0 1 22947 value=1 prim::Constant pnnx_9394 0 1 22948 value=3 prim::Constant pnnx_9395 0 1 22949 value=2 prim::Constant pnnx_9396 0 1 22950 value=4 prim::Constant pnnx_9397 0 1 22951 value=5 prim::ListConstruct pnnx_9398 6 1 22946 22947 22948 22949 22950 22951 9765 Tensor.view Tensor.view_1484 2 1 9756 9757 windows.93 $input=9756 $shape=9757 #9756=(36,64,192)f32 #windows.93=(36,8,8,192)f32 Tensor.view Tensor.view_1485 2 1 windows.93 9763 x9.21 $input=windows.93 $shape=9763 #windows.93=(36,8,8,192)f32 #x9.21=(1,6,6,8,8,192)f32 prim::Constant pnnx_9402 0 1 22953 value=1 prim::Constant pnnx_9403 0 1 22954 value=-1 prim::ListConstruct pnnx_9404 4 1 22953 1022 1262 22954 9768 torch.permute torch.permute_2711 2 1 x9.21 9765 9766 $input=x9.21 $dims=9765 #x9.21=(1,6,6,8,8,192)f32 #9766=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_139 1 1 9766 9767 memory_format=torch.contiguous_format $input=9766 #9766=(1,6,8,6,8,192)f32 #9767=(1,6,8,6,8,192)f32 prim::Constant pnnx_9406 0 1 22955 value=4 prim::Constant pnnx_9407 0 1 22956 value=4 prim::ListConstruct pnnx_9408 2 1 22955 22956 9770 prim::Constant pnnx_9409 0 1 22957 value=1 prim::Constant pnnx_9410 0 1 22958 value=2 prim::ListConstruct pnnx_9411 2 1 22957 22958 9771 Tensor.view Tensor.view_1486 2 1 9767 9768 shifted_x.47 $input=9767 $shape=9768 #9767=(1,6,8,6,8,192)f32 #shifted_x.47=(1,48,48,192)f32 aten::mul pnnx_9413 2 1 H0.1 W0.1 9773 aten::Int pnnx_9414 1 1 9773 9774 prim::ListConstruct pnnx_9415 3 1 9641 9774 9645 9775 prim::Constant pnnx_9417 0 1 9777 value=None prim::Constant pnnx_9418 0 1 22959 value=1 torch.roll torch.roll_2465 3 1 shifted_x.47 9770 9771 x10.11 $input=shifted_x.47 $shifts=9770 $dims=9771 #shifted_x.47=(1,48,48,192)f32 #x10.11=(1,48,48,192)f32 Tensor.view Tensor.view_1487 2 1 x10.11 9775 x11.11 $input=x10.11 $shape=9775 #x10.11=(1,48,48,192)f32 #x11.11=(1,2304,192)f32 aten::add pnnx_9419 3 1 9620 x11.11 22959 input.211 #9620=(1,2304,192)f32 #x11.11=(1,2304,192)f32 #input.211=(1,2304,192)f32 nn.LayerNorm layers_dfe.1.residual_group.blocks.3.norm2 1 1 input.211 9779 
elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.211=(1,2304,192)f32 #9779=(1,2304,192)f32 nn.Linear layers_dfe.1.residual_group.blocks.3.mlp.fc1 1 1 9779 9784 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #9779=(1,2304,192)f32 #9784=(1,2304,384)f32 nn.GELU layers_dfe.1.residual_group.blocks.3.mlp.act 1 1 9784 9785 #9784=(1,2304,384)f32 #9785=(1,2304,384)f32 nn.Dropout layers_dfe.1.residual_group.blocks.3.mlp.drop 1 1 9785 9786 #9785=(1,2304,384)f32 #9786=(1,2304,384)f32 nn.Linear layers_dfe.1.residual_group.blocks.3.mlp.fc2 1 1 9786 9787 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #9786=(1,2304,384)f32 #9787=(1,2304,192)f32 nn.Dropout layers_dfe.1.residual_group.blocks.3.mlp.drop 1 1 9787 9788 #9787=(1,2304,192)f32 #9788=(1,2304,192)f32 prim::Constant pnnx_9420 0 1 9789 value=None prim::Constant pnnx_9421 0 1 22960 value=1 aten::add pnnx_9422 3 1 input.211 9788 22960 9790 #input.211=(1,2304,192)f32 #9788=(1,2304,192)f32 #9790=(1,2304,192)f32 prim::Constant pnnx_9423 0 1 9791 value=trunc prim::Constant pnnx_9424 0 1 9792 value=8 prim::Constant pnnx_9425 0 1 9793 value=0 prim::Constant pnnx_9426 0 1 9794 value=2 prim::Constant pnnx_9427 0 1 9795 value=1 prim::Constant pnnx_9428 0 1 9796 value=3 prim::Constant pnnx_9429 0 1 9797 value=8 prim::Constant pnnx_9430 0 1 9798 value=4 prim::Constant pnnx_9431 0 1 9799 value=5 prim::Constant pnnx_9432 0 1 9800 value=-1 prim::Constant pnnx_9433 0 1 9801 value=64 aten::size pnnx_9434 2 1 9790 9793 9807 #9790=(1,2304,192)f32 prim::NumToTensor pnnx_9435 1 1 9807 B.113 aten::Int pnnx_9436 1 1 B.113 9809 aten::Int pnnx_9437 1 1 B.113 9810 aten::size pnnx_9438 2 1 9790 9794 9811 #9790=(1,2304,192)f32 prim::NumToTensor pnnx_9439 1 1 9811 C.195 aten::Int pnnx_9440 1 1 C.195 9813 aten::Int pnnx_9441 1 1 C.195 9814 aten::Int pnnx_9442 1 1 C.195 9815 aten::Int pnnx_9443 1 1 C.195 9816 nn.LayerNorm layers_dfe.1.residual_group.blocks.4.norm1 1 1 9790 9817 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #9790=(1,2304,192)f32 #9817=(1,2304,192)f32 prim::ListConstruct pnnx_9444 4 1 9810 1019 1259 9816 9818 prim::Constant pnnx_9446 0 1 22961 value=0 Tensor.view Tensor.view_1488 2 1 9817 9818 x.95 $input=9817 $shape=9818 #9817=(1,2304,192)f32 #x.95=(1,48,48,192)f32 aten::size pnnx_9447 2 1 x.95 22961 9820 #x.95=(1,48,48,192)f32 prim::NumToTensor pnnx_9448 1 1 9820 B1.23 aten::Int pnnx_9449 1 1 B1.23 9822 aten::size pnnx_9450 2 1 x.95 9795 9823 #x.95=(1,48,48,192)f32 prim::NumToTensor pnnx_9451 1 1 9823 9824 prim::Constant pnnx_9452 0 1 22962 value=2 aten::size pnnx_9453 2 1 x.95 22962 9825 #x.95=(1,48,48,192)f32 prim::NumToTensor pnnx_9454 1 1 9825 9826 aten::size pnnx_9455 2 1 x.95 9796 9827 #x.95=(1,48,48,192)f32 prim::NumToTensor pnnx_9456 1 1 9827 C1.23 aten::Int pnnx_9457 1 1 C1.23 9829 aten::Int pnnx_9458 1 1 C1.23 9830 aten::div pnnx_9459 3 1 9824 9792 9791 9831 aten::Int pnnx_9460 1 1 9831 9832 prim::Constant pnnx_9461 0 1 22963 value=8 prim::Constant pnnx_9462 0 1 22964 value=trunc aten::div pnnx_9463 3 1 9826 22963 22964 9833 aten::Int pnnx_9464 1 1 9833 9834 prim::Constant pnnx_9465 0 1 22965 value=8 prim::ListConstruct pnnx_9466 6 1 9822 9832 9797 9834 22965 9830 9835 prim::Constant pnnx_9468 0 1 22966 value=0 prim::Constant pnnx_9469 0 1 22967 value=1 prim::Constant pnnx_9470 0 1 22968 value=3 prim::Constant pnnx_9471 0 1 22969 value=2 prim::ListConstruct pnnx_9472 6 1 22966 22967 22968 
22969 9798 9799 9837 Tensor.view Tensor.view_1489 2 1 x.95 9835 x5.49 $input=x.95 $shape=9835 #x.95=(1,48,48,192)f32 #x5.49=(1,6,8,6,8,192)f32 prim::Constant pnnx_9476 0 1 22971 value=8 prim::Constant pnnx_9477 0 1 22972 value=8 prim::ListConstruct pnnx_9478 4 1 9800 22971 22972 9829 9840 torch.permute torch.permute_2712 2 1 x5.49 9837 9838 $input=x5.49 $dims=9837 #x5.49=(1,6,8,6,8,192)f32 #9838=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_140 1 1 9838 9839 memory_format=torch.contiguous_format $input=9838 #9838=(1,6,6,8,8,192)f32 #9839=(1,6,6,8,8,192)f32 prim::Constant pnnx_9480 0 1 22973 value=-1 prim::ListConstruct pnnx_9481 3 1 22973 9801 9815 9842 prim::Constant pnnx_9483 0 1 9844 value=1.767767e-01 prim::Constant pnnx_9484 0 1 9845 value=trunc prim::Constant pnnx_9485 0 1 9846 value=6 prim::Constant pnnx_9486 0 1 9847 value=0 prim::Constant pnnx_9487 0 1 9848 value=1 prim::Constant pnnx_9488 0 1 9849 value=2 prim::Constant pnnx_9489 0 1 9850 value=3 prim::Constant pnnx_9490 0 1 9851 value=6 prim::Constant pnnx_9491 0 1 9852 value=4 prim::Constant pnnx_9492 0 1 9853 value=-2 prim::Constant pnnx_9493 0 1 9854 value=-1 prim::Constant pnnx_9494 0 1 9855 value=64 pnnx.Attribute layers_dfe.1.residual_group.blocks.4.attn 0 1 relative_position_bias_table.95 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.95=(225,6)f32 pnnx.Attribute layers_dfe.1.residual_group.blocks.4.attn 0 1 relative_position_index.95 @relative_position_index=(64,64)i64 #relative_position_index.95=(64,64)i64 Tensor.view Tensor.view_1490 2 1 9839 9840 x_windows.95 $input=9839 $shape=9840 #9839=(1,6,6,8,8,192)f32 #x_windows.95=(36,8,8,192)f32 Tensor.view Tensor.view_1491 2 1 x_windows.95 9842 x6.23 $input=x_windows.95 $shape=9842 #x_windows.95=(36,8,8,192)f32 #x6.23=(36,64,192)f32 aten::size pnnx_9495 2 1 x6.23 9847 9863 #x6.23=(36,64,192)f32 prim::NumToTensor pnnx_9496 1 1 9863 B_.95 aten::Int pnnx_9497 1 1 B_.95 9865 aten::Int pnnx_9498 1 1 B_.95 9866 aten::size pnnx_9499 2 1 x6.23 9848 9867 #x6.23=(36,64,192)f32 prim::NumToTensor pnnx_9500 1 1 9867 N.95 aten::Int pnnx_9501 1 1 N.95 9869 aten::Int pnnx_9502 1 1 N.95 9870 aten::size pnnx_9503 2 1 x6.23 9849 9871 #x6.23=(36,64,192)f32 prim::NumToTensor pnnx_9504 1 1 9871 C.197 aten::Int pnnx_9505 1 1 C.197 9873 nn.Linear layers_dfe.1.residual_group.blocks.4.attn.qkv 1 1 x6.23 9874 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x6.23=(36,64,192)f32 #9874=(36,64,576)f32 aten::div pnnx_9506 3 1 C.197 9846 9845 9875 aten::Int pnnx_9507 1 1 9875 9876 prim::ListConstruct pnnx_9508 5 1 9866 9870 9850 9851 9876 9877 prim::Constant pnnx_9510 0 1 22974 value=2 prim::Constant pnnx_9511 0 1 22975 value=0 prim::Constant pnnx_9512 0 1 22976 value=3 prim::Constant pnnx_9513 0 1 22977 value=1 prim::ListConstruct pnnx_9514 5 1 22974 22975 22976 22977 9852 9879 Tensor.reshape Tensor.reshape_526 2 1 9874 9877 9878 $input=9874 $shape=9877 #9874=(36,64,576)f32 #9878=(36,64,3,6,32)f32 prim::Constant pnnx_9516 0 1 22978 value=0 prim::Constant pnnx_9517 0 1 22979 value=0 prim::Constant pnnx_9519 0 1 22980 value=0 prim::Constant pnnx_9520 0 1 22981 value=1 prim::Constant pnnx_9522 0 1 22982 value=0 prim::Constant pnnx_9523 0 1 22983 value=2 torch.permute torch.permute_2713 2 1 9878 9879 qkv1.23 $input=9878 $dims=9879 #9878=(36,64,3,6,32)f32 #qkv1.23=(3,36,6,64,32)f32 Tensor.select Tensor.select_788 3 1 qkv1.23 22978 22979 q.95 $input=qkv1.23 $dim=22978 $index=22979 #qkv1.23=(3,36,6,64,32)f32 #q.95=(36,6,64,32)f32 aten::mul pnnx_9525 
2 1 q.95 9844 q1.23 #q.95=(36,6,64,32)f32 #q1.23=(36,6,64,32)f32 Tensor.select Tensor.select_789 3 1 qkv1.23 22980 22981 k.95 $input=qkv1.23 $dim=22980 $index=22981 #qkv1.23=(3,36,6,64,32)f32 #k.95=(36,6,64,32)f32 prim::Constant pnnx_9528 0 1 22984 value=-1 prim::ListConstruct pnnx_9529 1 1 22984 9887 Tensor.view Tensor.view_1492 2 1 relative_position_index.95 9887 9888 $input=relative_position_index.95 $shape=9887 #relative_position_index.95=(64,64)i64 #9888=(4096)i64 prim::ListConstruct pnnx_9531 1 1 9888 9889 #9888=(4096)i64 prim::Constant pnnx_9533 0 1 22985 value=64 prim::Constant pnnx_9534 0 1 22986 value=-1 prim::ListConstruct pnnx_9535 3 1 9855 22985 22986 9891 Tensor.index Tensor.index_372 2 1 relative_position_bias_table.95 9889 9890 $input=relative_position_bias_table.95 $expr=9889 #relative_position_bias_table.95=(225,6)f32 #9890=(4096,6)f32 prim::Constant pnnx_9537 0 1 22987 value=2 prim::Constant pnnx_9538 0 1 22988 value=0 prim::Constant pnnx_9539 0 1 22989 value=1 prim::ListConstruct pnnx_9540 3 1 22987 22988 22989 9893 Tensor.view Tensor.view_1493 2 1 9890 9891 relative_position_bias.95 $input=9890 $shape=9891 #9890=(4096,6)f32 #relative_position_bias.95=(64,64,6)f32 prim::Constant pnnx_9544 0 1 22991 value=0 torch.permute torch.permute_2714 2 1 relative_position_bias.95 9893 9894 $input=relative_position_bias.95 $dims=9893 #relative_position_bias.95=(64,64,6)f32 #9894=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_141 1 1 9894 relative_position_bias1.23 memory_format=torch.contiguous_format $input=9894 #9894=(6,64,64)f32 #relative_position_bias1.23=(6,64,64)f32 prim::Constant pnnx_9546 0 1 22992 value=1 torch.transpose torch.transpose_3069 3 1 k.95 9853 9854 9885 $input=k.95 $dim0=9853 $dim1=9854 #k.95=(36,6,64,32)f32 #9885=(36,6,32,64)f32 torch.matmul torch.matmul_2296 2 1 q1.23 9885 attn.191 $input=q1.23 $other=9885 #q1.23=(36,6,64,32)f32 #9885=(36,6,32,64)f32 #attn.191=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3310 2 1 relative_position_bias1.23 22991 9896 $input=relative_position_bias1.23 $dim=22991 #relative_position_bias1.23=(6,64,64)f32 #9896=(1,6,64,64)f32 aten::add pnnx_9547 3 1 attn.191 9896 22992 input.213 #attn.191=(36,6,64,64)f32 #9896=(1,6,64,64)f32 #input.213=(36,6,64,64)f32 nn.Softmax layers_dfe.1.residual_group.blocks.4.attn.softmax 1 1 input.213 9898 dim=-1 #input.213=(36,6,64,64)f32 #9898=(36,6,64,64)f32 nn.Dropout layers_dfe.1.residual_group.blocks.4.attn.attn_drop 1 1 9898 9899 #9898=(36,6,64,64)f32 #9899=(36,6,64,64)f32 Tensor.select Tensor.select_790 3 1 qkv1.23 22982 22983 v.95 $input=qkv1.23 $dim=22982 $index=22983 #qkv1.23=(3,36,6,64,32)f32 #v.95=(36,6,64,32)f32 prim::Constant pnnx_9549 0 1 22993 value=1 prim::Constant pnnx_9550 0 1 22994 value=2 torch.matmul torch.matmul_2297 2 1 9899 v.95 9900 $input=9899 $other=v.95 #9899=(36,6,64,64)f32 #v.95=(36,6,64,32)f32 #9900=(36,6,64,32)f32 prim::ListConstruct pnnx_9552 3 1 9865 9869 9873 9902 torch.transpose torch.transpose_3070 3 1 9900 22993 22994 9901 $input=9900 $dim0=22993 $dim1=22994 #9900=(36,6,64,32)f32 #9901=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_527 2 1 9901 9902 input1.25 $input=9901 $shape=9902 #9901=(36,64,6,32)f32 #input1.25=(36,64,192)f32 nn.Linear layers_dfe.1.residual_group.blocks.4.attn.proj 1 1 input1.25 9904 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.25=(36,64,192)f32 #9904=(36,64,192)f32 nn.Dropout layers_dfe.1.residual_group.blocks.4.attn.proj_drop 1 1 9904 9905 #9904=(36,64,192)f32 #9905=(36,64,192)f32 prim::Constant 
pnnx_9554 0 1 22995 value=-1 prim::Constant pnnx_9555 0 1 22996 value=8 prim::Constant pnnx_9556 0 1 22997 value=8 prim::ListConstruct pnnx_9557 4 1 22995 22996 22997 9814 9906 prim::Constant pnnx_9559 0 1 22998 value=8 prim::Constant pnnx_9560 0 1 22999 value=trunc aten::div pnnx_9561 3 1 H0.1 22998 22999 9908 aten::Int pnnx_9562 1 1 9908 9909 prim::Constant pnnx_9563 0 1 23000 value=8 prim::Constant pnnx_9564 0 1 23001 value=trunc aten::div pnnx_9565 3 1 W0.1 23000 23001 9910 aten::Int pnnx_9566 1 1 9910 9911 prim::Constant pnnx_9567 0 1 23002 value=1 prim::Constant pnnx_9568 0 1 23003 value=8 prim::Constant pnnx_9569 0 1 23004 value=8 prim::Constant pnnx_9570 0 1 23005 value=-1 prim::ListConstruct pnnx_9571 6 1 23002 9909 9911 23003 23004 23005 9912 prim::Constant pnnx_9573 0 1 23006 value=0 prim::Constant pnnx_9574 0 1 23007 value=1 prim::Constant pnnx_9575 0 1 23008 value=3 prim::Constant pnnx_9576 0 1 23009 value=2 prim::Constant pnnx_9577 0 1 23010 value=4 prim::Constant pnnx_9578 0 1 23011 value=5 prim::ListConstruct pnnx_9579 6 1 23006 23007 23008 23009 23010 23011 9914 Tensor.view Tensor.view_1494 2 1 9905 9906 windows.95 $input=9905 $shape=9906 #9905=(36,64,192)f32 #windows.95=(36,8,8,192)f32 Tensor.view Tensor.view_1495 2 1 windows.95 9912 x7.23 $input=windows.95 $shape=9912 #windows.95=(36,8,8,192)f32 #x7.23=(1,6,6,8,8,192)f32 prim::Constant pnnx_9583 0 1 23013 value=1 prim::Constant pnnx_9584 0 1 23014 value=-1 prim::ListConstruct pnnx_9585 4 1 23013 1016 1256 23014 9917 torch.permute torch.permute_2715 2 1 x7.23 9914 9915 $input=x7.23 $dims=9914 #x7.23=(1,6,6,8,8,192)f32 #9915=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_142 1 1 9915 9916 memory_format=torch.contiguous_format $input=9915 #9915=(1,6,8,6,8,192)f32 #9916=(1,6,8,6,8,192)f32 aten::mul pnnx_9587 2 1 H0.1 W0.1 9919 aten::Int pnnx_9588 1 1 9919 9920 prim::ListConstruct pnnx_9589 3 1 9809 9920 9813 9921 prim::Constant pnnx_9591 0 1 9923 value=None prim::Constant pnnx_9592 0 1 23015 value=1 Tensor.view Tensor.view_1496 2 1 9916 9917 x8.23 $input=9916 $shape=9917 #9916=(1,6,8,6,8,192)f32 #x8.23=(1,48,48,192)f32 Tensor.view Tensor.view_1497 2 1 x8.23 9921 x9.23 $input=x8.23 $shape=9921 #x8.23=(1,48,48,192)f32 #x9.23=(1,2304,192)f32 aten::add pnnx_9593 3 1 9790 x9.23 23015 input.215 #9790=(1,2304,192)f32 #x9.23=(1,2304,192)f32 #input.215=(1,2304,192)f32 nn.LayerNorm layers_dfe.1.residual_group.blocks.4.norm2 1 1 input.215 9925 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.215=(1,2304,192)f32 #9925=(1,2304,192)f32 nn.Linear layers_dfe.1.residual_group.blocks.4.mlp.fc1 1 1 9925 9930 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #9925=(1,2304,192)f32 #9930=(1,2304,384)f32 nn.GELU layers_dfe.1.residual_group.blocks.4.mlp.act 1 1 9930 9931 #9930=(1,2304,384)f32 #9931=(1,2304,384)f32 nn.Dropout layers_dfe.1.residual_group.blocks.4.mlp.drop 1 1 9931 9932 #9931=(1,2304,384)f32 #9932=(1,2304,384)f32 nn.Linear layers_dfe.1.residual_group.blocks.4.mlp.fc2 1 1 9932 9933 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #9932=(1,2304,384)f32 #9933=(1,2304,192)f32 nn.Dropout layers_dfe.1.residual_group.blocks.4.mlp.drop 1 1 9933 9934 #9933=(1,2304,192)f32 #9934=(1,2304,192)f32 prim::Constant pnnx_9594 0 1 9935 value=None prim::Constant pnnx_9595 0 1 23016 value=1 aten::add pnnx_9596 3 1 input.215 9934 23016 9936 #input.215=(1,2304,192)f32 #9934=(1,2304,192)f32 #9936=(1,2304,192)f32 prim::Constant pnnx_9597 0 
1 9937 value=trunc prim::Constant pnnx_9598 0 1 9938 value=8 prim::Constant pnnx_9599 0 1 9939 value=0 prim::Constant pnnx_9600 0 1 9940 value=2 prim::Constant pnnx_9601 0 1 9941 value=-4 prim::Constant pnnx_9602 0 1 9942 value=1 prim::Constant pnnx_9603 0 1 9943 value=3 prim::Constant pnnx_9604 0 1 9944 value=8 prim::Constant pnnx_9605 0 1 9945 value=4 prim::Constant pnnx_9606 0 1 9946 value=5 prim::Constant pnnx_9607 0 1 9947 value=-1 prim::Constant pnnx_9608 0 1 9948 value=64 pnnx.Attribute layers_dfe.1.residual_group.blocks.5 0 1 attn_mask.49 @attn_mask=(36,64,64)f32 #attn_mask.49=(36,64,64)f32 aten::size pnnx_9609 2 1 9936 9939 9955 #9936=(1,2304,192)f32 prim::NumToTensor pnnx_9610 1 1 9955 B.115 aten::Int pnnx_9611 1 1 B.115 9957 aten::Int pnnx_9612 1 1 B.115 9958 aten::size pnnx_9613 2 1 9936 9940 9959 #9936=(1,2304,192)f32 prim::NumToTensor pnnx_9614 1 1 9959 C.199 aten::Int pnnx_9615 1 1 C.199 9961 aten::Int pnnx_9616 1 1 C.199 9962 aten::Int pnnx_9617 1 1 C.199 9963 aten::Int pnnx_9618 1 1 C.199 9964 nn.LayerNorm layers_dfe.1.residual_group.blocks.5.norm1 1 1 9936 9965 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #9936=(1,2304,192)f32 #9965=(1,2304,192)f32 prim::ListConstruct pnnx_9619 4 1 9958 1013 1253 9964 9966 prim::Constant pnnx_9621 0 1 23017 value=-4 prim::ListConstruct pnnx_9622 2 1 9941 23017 9968 prim::Constant pnnx_9623 0 1 23018 value=2 prim::ListConstruct pnnx_9624 2 1 9942 23018 9969 Tensor.view Tensor.view_1498 2 1 9965 9966 x.97 $input=9965 $shape=9966 #9965=(1,2304,192)f32 #x.97=(1,48,48,192)f32 prim::Constant pnnx_9626 0 1 23019 value=0 torch.roll torch.roll_2466 3 1 x.97 9968 9969 x6.25 $input=x.97 $shifts=9968 $dims=9969 #x.97=(1,48,48,192)f32 #x6.25=(1,48,48,192)f32 aten::size pnnx_9627 2 1 x6.25 23019 9971 #x6.25=(1,48,48,192)f32 prim::NumToTensor pnnx_9628 1 1 9971 B1.25 aten::Int pnnx_9629 1 1 B1.25 9973 prim::Constant pnnx_9630 0 1 23020 value=1 aten::size pnnx_9631 2 1 x6.25 23020 9974 #x6.25=(1,48,48,192)f32 prim::NumToTensor pnnx_9632 1 1 9974 9975 prim::Constant pnnx_9633 0 1 23021 value=2 aten::size pnnx_9634 2 1 x6.25 23021 9976 #x6.25=(1,48,48,192)f32 prim::NumToTensor pnnx_9635 1 1 9976 9977 aten::size pnnx_9636 2 1 x6.25 9943 9978 #x6.25=(1,48,48,192)f32 prim::NumToTensor pnnx_9637 1 1 9978 C1.25 aten::Int pnnx_9638 1 1 C1.25 9980 aten::Int pnnx_9639 1 1 C1.25 9981 aten::div pnnx_9640 3 1 9975 9938 9937 9982 aten::Int pnnx_9641 1 1 9982 9983 prim::Constant pnnx_9642 0 1 23022 value=8 prim::Constant pnnx_9643 0 1 23023 value=trunc aten::div pnnx_9644 3 1 9977 23022 23023 9984 aten::Int pnnx_9645 1 1 9984 9985 prim::Constant pnnx_9646 0 1 23024 value=8 prim::ListConstruct pnnx_9647 6 1 9973 9983 9944 9985 23024 9981 9986 prim::Constant pnnx_9649 0 1 23025 value=0 prim::Constant pnnx_9650 0 1 23026 value=1 prim::Constant pnnx_9651 0 1 23027 value=3 prim::Constant pnnx_9652 0 1 23028 value=2 prim::ListConstruct pnnx_9653 6 1 23025 23026 23027 23028 9945 9946 9988 Tensor.view Tensor.view_1499 2 1 x6.25 9986 x7.25 $input=x6.25 $shape=9986 #x6.25=(1,48,48,192)f32 #x7.25=(1,6,8,6,8,192)f32 prim::Constant pnnx_9657 0 1 23030 value=8 prim::Constant pnnx_9658 0 1 23031 value=8 prim::ListConstruct pnnx_9659 4 1 9947 23030 23031 9980 9991 torch.permute torch.permute_2716 2 1 x7.25 9988 9989 $input=x7.25 $dims=9988 #x7.25=(1,6,8,6,8,192)f32 #9989=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_143 1 1 9989 9990 memory_format=torch.contiguous_format $input=9989 #9989=(1,6,6,8,8,192)f32 
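
The roll/view/permute chain around this point (torch.roll_2466 with shifts (-4,-4) over dims (1,2), Tensor.view_1499 to (1,6,8,6,8,192), torch.permute_2716, Tensor.contiguous_143) appears to encode the usual Swin-style cyclic shift followed by an 8x8 window partition. A minimal PyTorch sketch of that layout, with illustrative names only (shift_and_partition is not a function from this graph, and it collapses the two trailing views into one):

import torch

def shift_and_partition(x, window_size=8, shift=4):
    # x: (B, H, W, C), e.g. (1, 48, 48, 192) as produced by the view above
    B, H, W, C = x.shape
    # cyclic shift, as in torch.roll with shifts (-4, -4) over dims (1, 2)
    shifted = torch.roll(x, shifts=(-shift, -shift), dims=(1, 2))
    ws = window_size
    # window partition: (B, H/ws, ws, W/ws, ws, C) -> (num_windows*B, ws*ws, C)
    windows = shifted.view(B, H // ws, ws, W // ws, ws, C)
    windows = windows.permute(0, 1, 3, 2, 4, 5).contiguous()
    return windows.view(-1, ws * ws, C)   # (36, 64, 192) for a 48x48, 192-channel map

print(shift_and_partition(torch.randn(1, 48, 48, 192)).shape)  # torch.Size([36, 64, 192])

For the shifted blocks (blocks.3 and blocks.5 here) the dump also stores a precomputed attn_mask attribute of shape (36,64,64) that is later broadcast-added to the attention logits before the softmax; the sketch stops at the partition step and omits that mask.
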
#9990=(1,6,6,8,8,192)f32 prim::Constant pnnx_9661 0 1 23032 value=-1 prim::ListConstruct pnnx_9662 3 1 23032 9948 9963 9993 prim::Constant pnnx_9664 0 1 9995 value=1.767767e-01 prim::Constant pnnx_9665 0 1 9996 value=trunc prim::Constant pnnx_9666 0 1 9997 value=6 prim::Constant pnnx_9667 0 1 9998 value=0 prim::Constant pnnx_9668 0 1 9999 value=1 prim::Constant pnnx_9669 0 1 10000 value=2 prim::Constant pnnx_9670 0 1 10001 value=3 prim::Constant pnnx_9671 0 1 10002 value=6 prim::Constant pnnx_9672 0 1 10003 value=4 prim::Constant pnnx_9673 0 1 10004 value=-2 prim::Constant pnnx_9674 0 1 10005 value=-1 prim::Constant pnnx_9675 0 1 10006 value=64 pnnx.Attribute layers_dfe.1.residual_group.blocks.5.attn 0 1 relative_position_bias_table.97 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.97=(225,6)f32 pnnx.Attribute layers_dfe.1.residual_group.blocks.5.attn 0 1 relative_position_index.97 @relative_position_index=(64,64)i64 #relative_position_index.97=(64,64)i64 Tensor.view Tensor.view_1500 2 1 9990 9991 x_windows.97 $input=9990 $shape=9991 #9990=(1,6,6,8,8,192)f32 #x_windows.97=(36,8,8,192)f32 Tensor.view Tensor.view_1501 2 1 x_windows.97 9993 x8.25 $input=x_windows.97 $shape=9993 #x_windows.97=(36,8,8,192)f32 #x8.25=(36,64,192)f32 aten::size pnnx_9676 2 1 x8.25 9998 10014 #x8.25=(36,64,192)f32 prim::NumToTensor pnnx_9677 1 1 10014 B_.97 aten::Int pnnx_9678 1 1 B_.97 10016 aten::Int pnnx_9679 1 1 B_.97 10017 aten::size pnnx_9680 2 1 x8.25 9999 10018 #x8.25=(36,64,192)f32 prim::NumToTensor pnnx_9681 1 1 10018 N.97 aten::Int pnnx_9682 1 1 N.97 10020 aten::Int pnnx_9683 1 1 N.97 10021 aten::Int pnnx_9684 1 1 N.97 10022 aten::Int pnnx_9685 1 1 N.97 10023 aten::Int pnnx_9686 1 1 N.97 10024 aten::Int pnnx_9687 1 1 N.97 10025 aten::size pnnx_9688 2 1 x8.25 10000 10026 #x8.25=(36,64,192)f32 prim::NumToTensor pnnx_9689 1 1 10026 C.201 aten::Int pnnx_9690 1 1 C.201 10028 nn.Linear layers_dfe.1.residual_group.blocks.5.attn.qkv 1 1 x8.25 10029 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x8.25=(36,64,192)f32 #10029=(36,64,576)f32 aten::div pnnx_9691 3 1 C.201 9997 9996 10030 aten::Int pnnx_9692 1 1 10030 10031 prim::ListConstruct pnnx_9693 5 1 10017 10025 10001 10002 10031 10032 prim::Constant pnnx_9695 0 1 23033 value=2 prim::Constant pnnx_9696 0 1 23034 value=0 prim::Constant pnnx_9697 0 1 23035 value=3 prim::Constant pnnx_9698 0 1 23036 value=1 prim::ListConstruct pnnx_9699 5 1 23033 23034 23035 23036 10003 10034 Tensor.reshape Tensor.reshape_528 2 1 10029 10032 10033 $input=10029 $shape=10032 #10029=(36,64,576)f32 #10033=(36,64,3,6,32)f32 prim::Constant pnnx_9701 0 1 23037 value=0 prim::Constant pnnx_9702 0 1 23038 value=0 prim::Constant pnnx_9704 0 1 23039 value=0 prim::Constant pnnx_9705 0 1 23040 value=1 prim::Constant pnnx_9707 0 1 23041 value=0 prim::Constant pnnx_9708 0 1 23042 value=2 torch.permute torch.permute_2717 2 1 10033 10034 qkv1.25 $input=10033 $dims=10034 #10033=(36,64,3,6,32)f32 #qkv1.25=(3,36,6,64,32)f32 Tensor.select Tensor.select_791 3 1 qkv1.25 23037 23038 q.97 $input=qkv1.25 $dim=23037 $index=23038 #qkv1.25=(3,36,6,64,32)f32 #q.97=(36,6,64,32)f32 aten::mul pnnx_9710 2 1 q.97 9995 q1.25 #q.97=(36,6,64,32)f32 #q1.25=(36,6,64,32)f32 Tensor.select Tensor.select_792 3 1 qkv1.25 23039 23040 k.97 $input=qkv1.25 $dim=23039 $index=23040 #qkv1.25=(3,36,6,64,32)f32 #k.97=(36,6,64,32)f32 prim::Constant pnnx_9713 0 1 23043 value=-1 prim::ListConstruct pnnx_9714 1 1 23043 10042 Tensor.view Tensor.view_1502 2 1 relative_position_index.97 
10042 10043 $input=relative_position_index.97 $shape=10042 #relative_position_index.97=(64,64)i64 #10043=(4096)i64 prim::ListConstruct pnnx_9716 1 1 10043 10044 #10043=(4096)i64 prim::Constant pnnx_9718 0 1 23044 value=64 prim::Constant pnnx_9719 0 1 23045 value=-1 prim::ListConstruct pnnx_9720 3 1 10006 23044 23045 10046 Tensor.index Tensor.index_373 2 1 relative_position_bias_table.97 10044 10045 $input=relative_position_bias_table.97 $expr=10044 #relative_position_bias_table.97=(225,6)f32 #10045=(4096,6)f32 prim::Constant pnnx_9722 0 1 23046 value=2 prim::Constant pnnx_9723 0 1 23047 value=0 prim::Constant pnnx_9724 0 1 23048 value=1 prim::ListConstruct pnnx_9725 3 1 23046 23047 23048 10048 Tensor.view Tensor.view_1503 2 1 10045 10046 relative_position_bias.97 $input=10045 $shape=10046 #10045=(4096,6)f32 #relative_position_bias.97=(64,64,6)f32 prim::Constant pnnx_9729 0 1 23050 value=0 torch.permute torch.permute_2718 2 1 relative_position_bias.97 10048 10049 $input=relative_position_bias.97 $dims=10048 #relative_position_bias.97=(64,64,6)f32 #10049=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_144 1 1 10049 relative_position_bias1.25 memory_format=torch.contiguous_format $input=10049 #10049=(6,64,64)f32 #relative_position_bias1.25=(6,64,64)f32 prim::Constant pnnx_9731 0 1 23051 value=1 torch.transpose torch.transpose_3071 3 1 k.97 10004 10005 10040 $input=k.97 $dim0=10004 $dim1=10005 #k.97=(36,6,64,32)f32 #10040=(36,6,32,64)f32 torch.matmul torch.matmul_2298 2 1 q1.25 10040 attn.195 $input=q1.25 $other=10040 #q1.25=(36,6,64,32)f32 #10040=(36,6,32,64)f32 #attn.195=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3311 2 1 relative_position_bias1.25 23050 10051 $input=relative_position_bias1.25 $dim=23050 #relative_position_bias1.25=(6,64,64)f32 #10051=(1,6,64,64)f32 aten::add pnnx_9732 3 1 attn.195 10051 23051 attn2.13 #attn.195=(36,6,64,64)f32 #10051=(1,6,64,64)f32 #attn2.13=(36,6,64,64)f32 prim::Constant pnnx_9733 0 1 23052 value=0 aten::size pnnx_9734 2 1 attn_mask.49 23052 10053 #attn_mask.49=(36,64,64)f32 prim::NumToTensor pnnx_9735 1 1 10053 other.49 aten::Int pnnx_9736 1 1 other.49 10055 prim::Constant pnnx_9737 0 1 23053 value=trunc aten::div pnnx_9738 3 1 B_.97 other.49 23053 10056 aten::Int pnnx_9739 1 1 10056 10057 prim::Constant pnnx_9740 0 1 23054 value=6 prim::ListConstruct pnnx_9741 5 1 10057 10055 23054 10024 10023 10058 prim::Constant pnnx_9743 0 1 23055 value=1 prim::Constant pnnx_9745 0 1 23056 value=0 prim::Constant pnnx_9747 0 1 23057 value=1 Tensor.view Tensor.view_1504 2 1 attn2.13 10058 10059 $input=attn2.13 $shape=10058 #attn2.13=(36,6,64,64)f32 #10059=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3312 2 1 attn_mask.49 23055 10060 $input=attn_mask.49 $dim=23055 #attn_mask.49=(36,64,64)f32 #10060=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3313 2 1 10060 23056 10061 $input=10060 $dim=23056 #10060=(36,1,64,64)f32 #10061=(1,36,1,64,64)f32 aten::add pnnx_9748 3 1 10059 10061 23057 attn3.13 #10059=(1,36,6,64,64)f32 #10061=(1,36,1,64,64)f32 #attn3.13=(1,36,6,64,64)f32 prim::Constant pnnx_9749 0 1 23058 value=-1 prim::Constant pnnx_9750 0 1 23059 value=6 prim::ListConstruct pnnx_9751 4 1 23058 23059 10022 10021 10063 Tensor.view Tensor.view_1505 2 1 attn3.13 10063 input.217 $input=attn3.13 $shape=10063 #attn3.13=(1,36,6,64,64)f32 #input.217=(36,6,64,64)f32 nn.Softmax layers_dfe.1.residual_group.blocks.5.attn.softmax 1 1 input.217 10065 dim=-1 #input.217=(36,6,64,64)f32 #10065=(36,6,64,64)f32 nn.Dropout layers_dfe.1.residual_group.blocks.5.attn.attn_drop 1 1 
10065 10066 #10065=(36,6,64,64)f32 #10066=(36,6,64,64)f32 Tensor.select Tensor.select_793 3 1 qkv1.25 23041 23042 v.97 $input=qkv1.25 $dim=23041 $index=23042 #qkv1.25=(3,36,6,64,32)f32 #v.97=(36,6,64,32)f32 prim::Constant pnnx_9754 0 1 23060 value=1 prim::Constant pnnx_9755 0 1 23061 value=2 torch.matmul torch.matmul_2299 2 1 10066 v.97 10067 $input=10066 $other=v.97 #10066=(36,6,64,64)f32 #v.97=(36,6,64,32)f32 #10067=(36,6,64,32)f32 prim::ListConstruct pnnx_9757 3 1 10016 10020 10028 10069 torch.transpose torch.transpose_3072 3 1 10067 23060 23061 10068 $input=10067 $dim0=23060 $dim1=23061 #10067=(36,6,64,32)f32 #10068=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_529 2 1 10068 10069 input1.27 $input=10068 $shape=10069 #10068=(36,64,6,32)f32 #input1.27=(36,64,192)f32 nn.Linear layers_dfe.1.residual_group.blocks.5.attn.proj 1 1 input1.27 10071 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.27=(36,64,192)f32 #10071=(36,64,192)f32 nn.Dropout layers_dfe.1.residual_group.blocks.5.attn.proj_drop 1 1 10071 10072 #10071=(36,64,192)f32 #10072=(36,64,192)f32 prim::Constant pnnx_9759 0 1 23062 value=-1 prim::Constant pnnx_9760 0 1 23063 value=8 prim::Constant pnnx_9761 0 1 23064 value=8 prim::ListConstruct pnnx_9762 4 1 23062 23063 23064 9962 10073 prim::Constant pnnx_9764 0 1 23065 value=8 prim::Constant pnnx_9765 0 1 23066 value=trunc aten::div pnnx_9766 3 1 H0.1 23065 23066 10075 aten::Int pnnx_9767 1 1 10075 10076 prim::Constant pnnx_9768 0 1 23067 value=8 prim::Constant pnnx_9769 0 1 23068 value=trunc aten::div pnnx_9770 3 1 W0.1 23067 23068 10077 aten::Int pnnx_9771 1 1 10077 10078 prim::Constant pnnx_9772 0 1 23069 value=1 prim::Constant pnnx_9773 0 1 23070 value=8 prim::Constant pnnx_9774 0 1 23071 value=8 prim::Constant pnnx_9775 0 1 23072 value=-1 prim::ListConstruct pnnx_9776 6 1 23069 10076 10078 23070 23071 23072 10079 prim::Constant pnnx_9778 0 1 23073 value=0 prim::Constant pnnx_9779 0 1 23074 value=1 prim::Constant pnnx_9780 0 1 23075 value=3 prim::Constant pnnx_9781 0 1 23076 value=2 prim::Constant pnnx_9782 0 1 23077 value=4 prim::Constant pnnx_9783 0 1 23078 value=5 prim::ListConstruct pnnx_9784 6 1 23073 23074 23075 23076 23077 23078 10081 Tensor.view Tensor.view_1506 2 1 10072 10073 windows.97 $input=10072 $shape=10073 #10072=(36,64,192)f32 #windows.97=(36,8,8,192)f32 Tensor.view Tensor.view_1507 2 1 windows.97 10079 x9.25 $input=windows.97 $shape=10079 #windows.97=(36,8,8,192)f32 #x9.25=(1,6,6,8,8,192)f32 prim::Constant pnnx_9788 0 1 23080 value=1 prim::Constant pnnx_9789 0 1 23081 value=-1 prim::ListConstruct pnnx_9790 4 1 23080 1010 1250 23081 10084 torch.permute torch.permute_2719 2 1 x9.25 10081 10082 $input=x9.25 $dims=10081 #x9.25=(1,6,6,8,8,192)f32 #10082=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_145 1 1 10082 10083 memory_format=torch.contiguous_format $input=10082 #10082=(1,6,8,6,8,192)f32 #10083=(1,6,8,6,8,192)f32 prim::Constant pnnx_9792 0 1 23082 value=4 prim::Constant pnnx_9793 0 1 23083 value=4 prim::ListConstruct pnnx_9794 2 1 23082 23083 10086 prim::Constant pnnx_9795 0 1 23084 value=1 prim::Constant pnnx_9796 0 1 23085 value=2 prim::ListConstruct pnnx_9797 2 1 23084 23085 10087 Tensor.view Tensor.view_1508 2 1 10083 10084 shifted_x.49 $input=10083 $shape=10084 #10083=(1,6,8,6,8,192)f32 #shifted_x.49=(1,48,48,192)f32 aten::mul pnnx_9799 2 1 H0.1 W0.1 10089 aten::Int pnnx_9800 1 1 10089 10090 prim::ListConstruct pnnx_9801 3 1 9957 10090 9961 10091 prim::Constant pnnx_9803 0 1 10093 value=None prim::Constant 
pnnx_9804 0 1 23086 value=1 torch.roll torch.roll_2467 3 1 shifted_x.49 10086 10087 x10.13 $input=shifted_x.49 $shifts=10086 $dims=10087 #shifted_x.49=(1,48,48,192)f32 #x10.13=(1,48,48,192)f32 Tensor.view Tensor.view_1509 2 1 x10.13 10091 x11.13 $input=x10.13 $shape=10091 #x10.13=(1,48,48,192)f32 #x11.13=(1,2304,192)f32 aten::add pnnx_9805 3 1 9936 x11.13 23086 input.219 #9936=(1,2304,192)f32 #x11.13=(1,2304,192)f32 #input.219=(1,2304,192)f32 nn.LayerNorm layers_dfe.1.residual_group.blocks.5.norm2 1 1 input.219 10095 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.219=(1,2304,192)f32 #10095=(1,2304,192)f32 nn.Linear layers_dfe.1.residual_group.blocks.5.mlp.fc1 1 1 10095 10100 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #10095=(1,2304,192)f32 #10100=(1,2304,384)f32 nn.GELU layers_dfe.1.residual_group.blocks.5.mlp.act 1 1 10100 10101 #10100=(1,2304,384)f32 #10101=(1,2304,384)f32 nn.Dropout layers_dfe.1.residual_group.blocks.5.mlp.drop 1 1 10101 10102 #10101=(1,2304,384)f32 #10102=(1,2304,384)f32 nn.Linear layers_dfe.1.residual_group.blocks.5.mlp.fc2 1 1 10102 10103 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #10102=(1,2304,384)f32 #10103=(1,2304,192)f32 nn.Dropout layers_dfe.1.residual_group.blocks.5.mlp.drop 1 1 10103 10104 #10103=(1,2304,192)f32 #10104=(1,2304,192)f32 prim::Constant pnnx_9806 0 1 10105 value=None prim::Constant pnnx_9807 0 1 23087 value=1 aten::add pnnx_9808 3 1 input.219 10104 23087 10106 #input.219=(1,2304,192)f32 #10104=(1,2304,192)f32 #10106=(1,2304,192)f32 prim::Constant pnnx_9809 0 1 10107 value=0 prim::Constant pnnx_9810 0 1 10108 value=1 prim::Constant pnnx_9811 0 1 10109 value=2 prim::Constant pnnx_9812 0 1 10110 value=192 aten::size pnnx_9813 2 1 10106 10107 10111 #10106=(1,2304,192)f32 prim::NumToTensor pnnx_9814 1 1 10111 B.117 aten::Int pnnx_9815 1 1 B.117 10113 prim::ListConstruct pnnx_9817 4 1 10113 10110 1007 1247 10115 torch.transpose torch.transpose_3073 3 1 10106 10108 10109 10114 $input=10106 $dim0=10108 $dim1=10109 #10106=(1,2304,192)f32 #10114=(1,192,2304)f32 Tensor.view Tensor.view_1510 2 1 10114 10115 input.221 $input=10114 $shape=10115 #10114=(1,192,2304)f32 #input.221=(1,192,48,48)f32 nn.Conv2d layers_dfe.1.conv 1 1 input.221 10117 bias=True dilation=(1,1) groups=1 in_channels=192 kernel_size=(3,3) out_channels=192 padding=(1,1) padding_mode=zeros stride=(1,1) @bias=(192)f32 @weight=(192,192,3,3)f32 #input.221=(1,192,48,48)f32 #10117=(1,192,48,48)f32 prim::Constant pnnx_9819 0 1 10118 value=-1 prim::Constant pnnx_9820 0 1 10119 value=2 prim::Constant pnnx_9821 0 1 10120 value=1 prim::Constant pnnx_9823 0 1 23088 value=2 torch.flatten torch.flatten_2192 3 1 10117 10119 10118 10121 $input=10117 $start_dim=10119 $end_dim=10118 #10117=(1,192,48,48)f32 #10121=(1,192,2304)f32 torch.transpose torch.transpose_3074 3 1 10121 10120 23088 10122 $input=10121 $dim0=10120 $dim1=23088 #10121=(1,192,2304)f32 #10122=(1,2304,192)f32 aten::add pnnx_9825 3 1 10122 9141 9142 10123 #10122=(1,2304,192)f32 #9141=(1,2304,192)f32 #10123=(1,2304,192)f32 prim::Constant pnnx_9826 0 1 10124 value=1 prim::Constant pnnx_9827 0 1 10141 value=trunc prim::Constant pnnx_9828 0 1 10142 value=8 prim::Constant pnnx_9829 0 1 10143 value=0 prim::Constant pnnx_9830 0 1 10144 value=2 prim::Constant pnnx_9831 0 1 10145 value=1 prim::Constant pnnx_9832 0 1 10146 value=3 prim::Constant pnnx_9833 0 1 10147 value=8 prim::Constant pnnx_9834 0 1 10148 value=4 prim::Constant 
pnnx_9835 0 1 10149 value=5 prim::Constant pnnx_9836 0 1 10150 value=-1 prim::Constant pnnx_9837 0 1 10151 value=64 aten::size pnnx_9838 2 1 10123 10143 10157 #10123=(1,2304,192)f32 prim::NumToTensor pnnx_9839 1 1 10157 B.119 aten::Int pnnx_9840 1 1 B.119 10159 aten::Int pnnx_9841 1 1 B.119 10160 aten::size pnnx_9842 2 1 10123 10144 10161 #10123=(1,2304,192)f32 prim::NumToTensor pnnx_9843 1 1 10161 C.203 aten::Int pnnx_9844 1 1 C.203 10163 aten::Int pnnx_9845 1 1 C.203 10164 aten::Int pnnx_9846 1 1 C.203 10165 aten::Int pnnx_9847 1 1 C.203 10166 nn.LayerNorm layers_dfe.2.residual_group.blocks.0.norm1 1 1 10123 10167 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #10123=(1,2304,192)f32 #10167=(1,2304,192)f32 prim::ListConstruct pnnx_9848 4 1 10160 1004 1244 10166 10168 prim::Constant pnnx_9850 0 1 23089 value=0 Tensor.view Tensor.view_1511 2 1 10167 10168 x.99 $input=10167 $shape=10168 #10167=(1,2304,192)f32 #x.99=(1,48,48,192)f32 aten::size pnnx_9851 2 1 x.99 23089 10170 #x.99=(1,48,48,192)f32 prim::NumToTensor pnnx_9852 1 1 10170 B1.27 aten::Int pnnx_9853 1 1 B1.27 10172 aten::size pnnx_9854 2 1 x.99 10145 10173 #x.99=(1,48,48,192)f32 prim::NumToTensor pnnx_9855 1 1 10173 10174 prim::Constant pnnx_9856 0 1 23090 value=2 aten::size pnnx_9857 2 1 x.99 23090 10175 #x.99=(1,48,48,192)f32 prim::NumToTensor pnnx_9858 1 1 10175 10176 aten::size pnnx_9859 2 1 x.99 10146 10177 #x.99=(1,48,48,192)f32 prim::NumToTensor pnnx_9860 1 1 10177 C1.27 aten::Int pnnx_9861 1 1 C1.27 10179 aten::Int pnnx_9862 1 1 C1.27 10180 aten::div pnnx_9863 3 1 10174 10142 10141 10181 aten::Int pnnx_9864 1 1 10181 10182 prim::Constant pnnx_9865 0 1 23091 value=8 prim::Constant pnnx_9866 0 1 23092 value=trunc aten::div pnnx_9867 3 1 10176 23091 23092 10183 aten::Int pnnx_9868 1 1 10183 10184 prim::Constant pnnx_9869 0 1 23093 value=8 prim::ListConstruct pnnx_9870 6 1 10172 10182 10147 10184 23093 10180 10185 prim::Constant pnnx_9872 0 1 23094 value=0 prim::Constant pnnx_9873 0 1 23095 value=1 prim::Constant pnnx_9874 0 1 23096 value=3 prim::Constant pnnx_9875 0 1 23097 value=2 prim::ListConstruct pnnx_9876 6 1 23094 23095 23096 23097 10148 10149 10187 Tensor.view Tensor.view_1512 2 1 x.99 10185 x5.51 $input=x.99 $shape=10185 #x.99=(1,48,48,192)f32 #x5.51=(1,6,8,6,8,192)f32 prim::Constant pnnx_9880 0 1 23099 value=8 prim::Constant pnnx_9881 0 1 23100 value=8 prim::ListConstruct pnnx_9882 4 1 10150 23099 23100 10179 10190 torch.permute torch.permute_2720 2 1 x5.51 10187 10188 $input=x5.51 $dims=10187 #x5.51=(1,6,8,6,8,192)f32 #10188=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_146 1 1 10188 10189 memory_format=torch.contiguous_format $input=10188 #10188=(1,6,6,8,8,192)f32 #10189=(1,6,6,8,8,192)f32 prim::Constant pnnx_9884 0 1 23101 value=-1 prim::ListConstruct pnnx_9885 3 1 23101 10151 10165 10192 prim::Constant pnnx_9887 0 1 10194 value=1.767767e-01 prim::Constant pnnx_9888 0 1 10195 value=trunc prim::Constant pnnx_9889 0 1 10196 value=6 prim::Constant pnnx_9890 0 1 10197 value=0 prim::Constant pnnx_9891 0 1 10198 value=1 prim::Constant pnnx_9892 0 1 10199 value=2 prim::Constant pnnx_9893 0 1 10200 value=3 prim::Constant pnnx_9894 0 1 10201 value=6 prim::Constant pnnx_9895 0 1 10202 value=4 prim::Constant pnnx_9896 0 1 10203 value=-2 prim::Constant pnnx_9897 0 1 10204 value=-1 prim::Constant pnnx_9898 0 1 10205 value=64 pnnx.Attribute layers_dfe.2.residual_group.blocks.0.attn 0 1 relative_position_bias_table.99 @relative_position_bias_table=(225,6)f32 
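
Stepping back one region: the transpose/view/conv/flatten chain that closes layers_dfe.1 above (torch.transpose_3073, Tensor.view_1510 to (1,192,48,48), layers_dfe.1.conv, torch.flatten_2192, torch.transpose_3074, then aten::add pnnx_9825 onto tensor 9141) reads as the familiar residual-group tail: tokens are reshaped to an image, passed through a 3x3 convolution, flattened back, and added to the group input. A hedged sketch under that reading (the shapes and the function name residual_group_tail are illustrative, and the conv weights here are random):

import torch
import torch.nn as nn

# conv hyperparameters mirror layers_dfe.1.conv above: 192 -> 192, 3x3, padding 1
conv = nn.Conv2d(192, 192, kernel_size=3, padding=1)

def residual_group_tail(x_seq, group_input, H=48, W=48):
    # x_seq, group_input: (B, H*W, C) token sequences, e.g. (1, 2304, 192)
    B, L, C = x_seq.shape
    x = x_seq.transpose(1, 2).view(B, C, H, W)  # tokens -> NCHW, as in the transpose + view pair
    x = conv(x)                                 # 3x3 conv inside the residual group
    x = x.flatten(2).transpose(1, 2)            # back to (B, H*W, C), as in the flatten + transpose pair
    return x + group_input                      # long skip over the whole group (the add onto tensor 9141)

out = residual_group_tail(torch.randn(1, 2304, 192), torch.randn(1, 2304, 192))
print(out.shape)  # torch.Size([1, 2304, 192])

Presumably the same chain closes the other layers_dfe groups elsewhere in the dump; only the layers_dfe.1 instance is visible in this region.
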
#relative_position_bias_table.99=(225,6)f32 pnnx.Attribute layers_dfe.2.residual_group.blocks.0.attn 0 1 relative_position_index.99 @relative_position_index=(64,64)i64 #relative_position_index.99=(64,64)i64 Tensor.view Tensor.view_1513 2 1 10189 10190 x_windows.99 $input=10189 $shape=10190 #10189=(1,6,6,8,8,192)f32 #x_windows.99=(36,8,8,192)f32 Tensor.view Tensor.view_1514 2 1 x_windows.99 10192 x6.27 $input=x_windows.99 $shape=10192 #x_windows.99=(36,8,8,192)f32 #x6.27=(36,64,192)f32 aten::size pnnx_9899 2 1 x6.27 10197 10213 #x6.27=(36,64,192)f32 prim::NumToTensor pnnx_9900 1 1 10213 B_.99 aten::Int pnnx_9901 1 1 B_.99 10215 aten::Int pnnx_9902 1 1 B_.99 10216 aten::size pnnx_9903 2 1 x6.27 10198 10217 #x6.27=(36,64,192)f32 prim::NumToTensor pnnx_9904 1 1 10217 N.99 aten::Int pnnx_9905 1 1 N.99 10219 aten::Int pnnx_9906 1 1 N.99 10220 aten::size pnnx_9907 2 1 x6.27 10199 10221 #x6.27=(36,64,192)f32 prim::NumToTensor pnnx_9908 1 1 10221 C.205 aten::Int pnnx_9909 1 1 C.205 10223 nn.Linear layers_dfe.2.residual_group.blocks.0.attn.qkv 1 1 x6.27 10224 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x6.27=(36,64,192)f32 #10224=(36,64,576)f32 aten::div pnnx_9910 3 1 C.205 10196 10195 10225 aten::Int pnnx_9911 1 1 10225 10226 prim::ListConstruct pnnx_9912 5 1 10216 10220 10200 10201 10226 10227 prim::Constant pnnx_9914 0 1 23102 value=2 prim::Constant pnnx_9915 0 1 23103 value=0 prim::Constant pnnx_9916 0 1 23104 value=3 prim::Constant pnnx_9917 0 1 23105 value=1 prim::ListConstruct pnnx_9918 5 1 23102 23103 23104 23105 10202 10229 Tensor.reshape Tensor.reshape_530 2 1 10224 10227 10228 $input=10224 $shape=10227 #10224=(36,64,576)f32 #10228=(36,64,3,6,32)f32 prim::Constant pnnx_9920 0 1 23106 value=0 prim::Constant pnnx_9921 0 1 23107 value=0 prim::Constant pnnx_9923 0 1 23108 value=0 prim::Constant pnnx_9924 0 1 23109 value=1 prim::Constant pnnx_9926 0 1 23110 value=0 prim::Constant pnnx_9927 0 1 23111 value=2 torch.permute torch.permute_2721 2 1 10228 10229 qkv1.27 $input=10228 $dims=10229 #10228=(36,64,3,6,32)f32 #qkv1.27=(3,36,6,64,32)f32 Tensor.select Tensor.select_794 3 1 qkv1.27 23106 23107 q.99 $input=qkv1.27 $dim=23106 $index=23107 #qkv1.27=(3,36,6,64,32)f32 #q.99=(36,6,64,32)f32 aten::mul pnnx_9929 2 1 q.99 10194 q1.27 #q.99=(36,6,64,32)f32 #q1.27=(36,6,64,32)f32 Tensor.select Tensor.select_795 3 1 qkv1.27 23108 23109 k.99 $input=qkv1.27 $dim=23108 $index=23109 #qkv1.27=(3,36,6,64,32)f32 #k.99=(36,6,64,32)f32 prim::Constant pnnx_9932 0 1 23112 value=-1 prim::ListConstruct pnnx_9933 1 1 23112 10237 Tensor.view Tensor.view_1515 2 1 relative_position_index.99 10237 10238 $input=relative_position_index.99 $shape=10237 #relative_position_index.99=(64,64)i64 #10238=(4096)i64 prim::ListConstruct pnnx_9935 1 1 10238 10239 #10238=(4096)i64 prim::Constant pnnx_9937 0 1 23113 value=64 prim::Constant pnnx_9938 0 1 23114 value=-1 prim::ListConstruct pnnx_9939 3 1 10205 23113 23114 10241 Tensor.index Tensor.index_374 2 1 relative_position_bias_table.99 10239 10240 $input=relative_position_bias_table.99 $expr=10239 #relative_position_bias_table.99=(225,6)f32 #10240=(4096,6)f32 prim::Constant pnnx_9941 0 1 23115 value=2 prim::Constant pnnx_9942 0 1 23116 value=0 prim::Constant pnnx_9943 0 1 23117 value=1 prim::ListConstruct pnnx_9944 3 1 23115 23116 23117 10243 Tensor.view Tensor.view_1516 2 1 10240 10241 relative_position_bias.99 $input=10240 $shape=10241 #10240=(4096,6)f32 #relative_position_bias.99=(64,64,6)f32 prim::Constant pnnx_9948 0 1 23119 value=0 
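
The relative_position_index / relative_position_bias_table pair handled just above (Tensor.view_1515, Tensor.index_374, Tensor.view_1516, followed by the permute and broadcast add on the next lines) matches the standard relative-position-bias lookup for an 8x8 window with 6 heads: (2*8-1)^2 = 225 table rows are gathered into a (64,64,6) map. A standalone sketch with random placeholder data (table and index here are illustrative stand-ins, not the trained attributes):

import torch

window_size, num_heads = 8, 6
# shapes mirror the attributes above: (2*8-1)**2 = 225 table rows and a (64,64) index
table = torch.randn((2 * window_size - 1) ** 2, num_heads)            # relative_position_bias_table
index = torch.randint(0, 225, (window_size ** 2, window_size ** 2))   # relative_position_index

bias = table[index.view(-1)]                              # (4096, 6), the Tensor.index gather
bias = bias.view(window_size ** 2, window_size ** 2, -1)  # (64, 64, 6)
bias = bias.permute(2, 0, 1).contiguous()                 # (6, 64, 64), one bias map per head
attn = torch.randn(36, num_heads, 64, 64)                 # q @ k^T logits for 36 windows
attn = attn + bias.unsqueeze(0)                           # broadcast add before the softmax
print(attn.shape)                                         # torch.Size([36, 6, 64, 64])
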
torch.permute torch.permute_2722 2 1 relative_position_bias.99 10243 10244 $input=relative_position_bias.99 $dims=10243 #relative_position_bias.99=(64,64,6)f32 #10244=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_147 1 1 10244 relative_position_bias1.27 memory_format=torch.contiguous_format $input=10244 #10244=(6,64,64)f32 #relative_position_bias1.27=(6,64,64)f32 prim::Constant pnnx_9950 0 1 23120 value=1 torch.transpose torch.transpose_3075 3 1 k.99 10203 10204 10235 $input=k.99 $dim0=10203 $dim1=10204 #k.99=(36,6,64,32)f32 #10235=(36,6,32,64)f32 torch.matmul torch.matmul_2300 2 1 q1.27 10235 attn.199 $input=q1.27 $other=10235 #q1.27=(36,6,64,32)f32 #10235=(36,6,32,64)f32 #attn.199=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3314 2 1 relative_position_bias1.27 23119 10246 $input=relative_position_bias1.27 $dim=23119 #relative_position_bias1.27=(6,64,64)f32 #10246=(1,6,64,64)f32 aten::add pnnx_9951 3 1 attn.199 10246 23120 input.223 #attn.199=(36,6,64,64)f32 #10246=(1,6,64,64)f32 #input.223=(36,6,64,64)f32 nn.Softmax layers_dfe.2.residual_group.blocks.0.attn.softmax 1 1 input.223 10248 dim=-1 #input.223=(36,6,64,64)f32 #10248=(36,6,64,64)f32 nn.Dropout layers_dfe.2.residual_group.blocks.0.attn.attn_drop 1 1 10248 10249 #10248=(36,6,64,64)f32 #10249=(36,6,64,64)f32 Tensor.select Tensor.select_796 3 1 qkv1.27 23110 23111 v.99 $input=qkv1.27 $dim=23110 $index=23111 #qkv1.27=(3,36,6,64,32)f32 #v.99=(36,6,64,32)f32 prim::Constant pnnx_9953 0 1 23121 value=1 prim::Constant pnnx_9954 0 1 23122 value=2 torch.matmul torch.matmul_2301 2 1 10249 v.99 10250 $input=10249 $other=v.99 #10249=(36,6,64,64)f32 #v.99=(36,6,64,32)f32 #10250=(36,6,64,32)f32 prim::ListConstruct pnnx_9956 3 1 10215 10219 10223 10252 torch.transpose torch.transpose_3076 3 1 10250 23121 23122 10251 $input=10250 $dim0=23121 $dim1=23122 #10250=(36,6,64,32)f32 #10251=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_531 2 1 10251 10252 input1.29 $input=10251 $shape=10252 #10251=(36,64,6,32)f32 #input1.29=(36,64,192)f32 nn.Linear layers_dfe.2.residual_group.blocks.0.attn.proj 1 1 input1.29 10254 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.29=(36,64,192)f32 #10254=(36,64,192)f32 nn.Dropout layers_dfe.2.residual_group.blocks.0.attn.proj_drop 1 1 10254 10255 #10254=(36,64,192)f32 #10255=(36,64,192)f32 prim::Constant pnnx_9958 0 1 23123 value=-1 prim::Constant pnnx_9959 0 1 23124 value=8 prim::Constant pnnx_9960 0 1 23125 value=8 prim::ListConstruct pnnx_9961 4 1 23123 23124 23125 10164 10256 prim::Constant pnnx_9963 0 1 23126 value=8 prim::Constant pnnx_9964 0 1 23127 value=trunc aten::div pnnx_9965 3 1 H0.1 23126 23127 10258 aten::Int pnnx_9966 1 1 10258 10259 prim::Constant pnnx_9967 0 1 23128 value=8 prim::Constant pnnx_9968 0 1 23129 value=trunc aten::div pnnx_9969 3 1 W0.1 23128 23129 10260 aten::Int pnnx_9970 1 1 10260 10261 prim::Constant pnnx_9971 0 1 23130 value=1 prim::Constant pnnx_9972 0 1 23131 value=8 prim::Constant pnnx_9973 0 1 23132 value=8 prim::Constant pnnx_9974 0 1 23133 value=-1 prim::ListConstruct pnnx_9975 6 1 23130 10259 10261 23131 23132 23133 10262 prim::Constant pnnx_9977 0 1 23134 value=0 prim::Constant pnnx_9978 0 1 23135 value=1 prim::Constant pnnx_9979 0 1 23136 value=3 prim::Constant pnnx_9980 0 1 23137 value=2 prim::Constant pnnx_9981 0 1 23138 value=4 prim::Constant pnnx_9982 0 1 23139 value=5 prim::ListConstruct pnnx_9983 6 1 23134 23135 23136 23137 23138 23139 10264 Tensor.view Tensor.view_1517 2 1 10255 10256 windows.99 $input=10255 $shape=10256 
#10255=(36,64,192)f32 #windows.99=(36,8,8,192)f32 Tensor.view Tensor.view_1518 2 1 windows.99 10262 x7.27 $input=windows.99 $shape=10262 #windows.99=(36,8,8,192)f32 #x7.27=(1,6,6,8,8,192)f32 prim::Constant pnnx_9987 0 1 23141 value=1 prim::Constant pnnx_9988 0 1 23142 value=-1 prim::ListConstruct pnnx_9989 4 1 23141 1001 1241 23142 10267 torch.permute torch.permute_2723 2 1 x7.27 10264 10265 $input=x7.27 $dims=10264 #x7.27=(1,6,6,8,8,192)f32 #10265=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_148 1 1 10265 10266 memory_format=torch.contiguous_format $input=10265 #10265=(1,6,8,6,8,192)f32 #10266=(1,6,8,6,8,192)f32 aten::mul pnnx_9991 2 1 H0.1 W0.1 10269 aten::Int pnnx_9992 1 1 10269 10270 prim::ListConstruct pnnx_9993 3 1 10159 10270 10163 10271 prim::Constant pnnx_9995 0 1 10273 value=None prim::Constant pnnx_9996 0 1 23143 value=1 Tensor.view Tensor.view_1519 2 1 10266 10267 x8.27 $input=10266 $shape=10267 #10266=(1,6,8,6,8,192)f32 #x8.27=(1,48,48,192)f32 Tensor.view Tensor.view_1520 2 1 x8.27 10271 x9.27 $input=x8.27 $shape=10271 #x8.27=(1,48,48,192)f32 #x9.27=(1,2304,192)f32 aten::add pnnx_9997 3 1 10123 x9.27 23143 input.225 #10123=(1,2304,192)f32 #x9.27=(1,2304,192)f32 #input.225=(1,2304,192)f32 nn.LayerNorm layers_dfe.2.residual_group.blocks.0.norm2 1 1 input.225 10275 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.225=(1,2304,192)f32 #10275=(1,2304,192)f32 nn.Linear layers_dfe.2.residual_group.blocks.0.mlp.fc1 1 1 10275 10280 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #10275=(1,2304,192)f32 #10280=(1,2304,384)f32 nn.GELU layers_dfe.2.residual_group.blocks.0.mlp.act 1 1 10280 10281 #10280=(1,2304,384)f32 #10281=(1,2304,384)f32 nn.Dropout layers_dfe.2.residual_group.blocks.0.mlp.drop 1 1 10281 10282 #10281=(1,2304,384)f32 #10282=(1,2304,384)f32 nn.Linear layers_dfe.2.residual_group.blocks.0.mlp.fc2 1 1 10282 10283 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #10282=(1,2304,384)f32 #10283=(1,2304,192)f32 nn.Dropout layers_dfe.2.residual_group.blocks.0.mlp.drop 1 1 10283 10284 #10283=(1,2304,192)f32 #10284=(1,2304,192)f32 prim::Constant pnnx_9998 0 1 10285 value=None prim::Constant pnnx_9999 0 1 23144 value=1 aten::add pnnx_10000 3 1 input.225 10284 23144 10286 #input.225=(1,2304,192)f32 #10284=(1,2304,192)f32 #10286=(1,2304,192)f32 prim::Constant pnnx_10001 0 1 10287 value=trunc prim::Constant pnnx_10002 0 1 10288 value=8 prim::Constant pnnx_10003 0 1 10289 value=0 prim::Constant pnnx_10004 0 1 10290 value=2 prim::Constant pnnx_10005 0 1 10291 value=-4 prim::Constant pnnx_10006 0 1 10292 value=1 prim::Constant pnnx_10007 0 1 10293 value=3 prim::Constant pnnx_10008 0 1 10294 value=8 prim::Constant pnnx_10009 0 1 10295 value=4 prim::Constant pnnx_10010 0 1 10296 value=5 prim::Constant pnnx_10011 0 1 10297 value=-1 prim::Constant pnnx_10012 0 1 10298 value=64 pnnx.Attribute layers_dfe.2.residual_group.blocks.1 0 1 attn_mask.51 @attn_mask=(36,64,64)f32 #attn_mask.51=(36,64,64)f32 aten::size pnnx_10013 2 1 10286 10289 10305 #10286=(1,2304,192)f32 prim::NumToTensor pnnx_10014 1 1 10305 B.121 aten::Int pnnx_10015 1 1 B.121 10307 aten::Int pnnx_10016 1 1 B.121 10308 aten::size pnnx_10017 2 1 10286 10290 10309 #10286=(1,2304,192)f32 prim::NumToTensor pnnx_10018 1 1 10309 C.207 aten::Int pnnx_10019 1 1 C.207 10311 aten::Int pnnx_10020 1 1 C.207 10312 aten::Int pnnx_10021 1 1 C.207 10313 aten::Int pnnx_10022 1 1 C.207 10314 nn.LayerNorm 
layers_dfe.2.residual_group.blocks.1.norm1 1 1 10286 10315 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #10286=(1,2304,192)f32 #10315=(1,2304,192)f32 prim::ListConstruct pnnx_10023 4 1 10308 998 1238 10314 10316 prim::Constant pnnx_10025 0 1 23145 value=-4 prim::ListConstruct pnnx_10026 2 1 10291 23145 10318 prim::Constant pnnx_10027 0 1 23146 value=2 prim::ListConstruct pnnx_10028 2 1 10292 23146 10319 Tensor.view Tensor.view_1521 2 1 10315 10316 x.101 $input=10315 $shape=10316 #10315=(1,2304,192)f32 #x.101=(1,48,48,192)f32 prim::Constant pnnx_10030 0 1 23147 value=0 torch.roll torch.roll_2468 3 1 x.101 10318 10319 x6.29 $input=x.101 $shifts=10318 $dims=10319 #x.101=(1,48,48,192)f32 #x6.29=(1,48,48,192)f32 aten::size pnnx_10031 2 1 x6.29 23147 10321 #x6.29=(1,48,48,192)f32 prim::NumToTensor pnnx_10032 1 1 10321 B1.29 aten::Int pnnx_10033 1 1 B1.29 10323 prim::Constant pnnx_10034 0 1 23148 value=1 aten::size pnnx_10035 2 1 x6.29 23148 10324 #x6.29=(1,48,48,192)f32 prim::NumToTensor pnnx_10036 1 1 10324 10325 prim::Constant pnnx_10037 0 1 23149 value=2 aten::size pnnx_10038 2 1 x6.29 23149 10326 #x6.29=(1,48,48,192)f32 prim::NumToTensor pnnx_10039 1 1 10326 10327 aten::size pnnx_10040 2 1 x6.29 10293 10328 #x6.29=(1,48,48,192)f32 prim::NumToTensor pnnx_10041 1 1 10328 C1.29 aten::Int pnnx_10042 1 1 C1.29 10330 aten::Int pnnx_10043 1 1 C1.29 10331 aten::div pnnx_10044 3 1 10325 10288 10287 10332 aten::Int pnnx_10045 1 1 10332 10333 prim::Constant pnnx_10046 0 1 23150 value=8 prim::Constant pnnx_10047 0 1 23151 value=trunc aten::div pnnx_10048 3 1 10327 23150 23151 10334 aten::Int pnnx_10049 1 1 10334 10335 prim::Constant pnnx_10050 0 1 23152 value=8 prim::ListConstruct pnnx_10051 6 1 10323 10333 10294 10335 23152 10331 10336 prim::Constant pnnx_10053 0 1 23153 value=0 prim::Constant pnnx_10054 0 1 23154 value=1 prim::Constant pnnx_10055 0 1 23155 value=3 prim::Constant pnnx_10056 0 1 23156 value=2 prim::ListConstruct pnnx_10057 6 1 23153 23154 23155 23156 10295 10296 10338 Tensor.view Tensor.view_1522 2 1 x6.29 10336 x7.29 $input=x6.29 $shape=10336 #x6.29=(1,48,48,192)f32 #x7.29=(1,6,8,6,8,192)f32 prim::Constant pnnx_10061 0 1 23158 value=8 prim::Constant pnnx_10062 0 1 23159 value=8 prim::ListConstruct pnnx_10063 4 1 10297 23158 23159 10330 10341 torch.permute torch.permute_2724 2 1 x7.29 10338 10339 $input=x7.29 $dims=10338 #x7.29=(1,6,8,6,8,192)f32 #10339=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_149 1 1 10339 10340 memory_format=torch.contiguous_format $input=10339 #10339=(1,6,6,8,8,192)f32 #10340=(1,6,6,8,8,192)f32 prim::Constant pnnx_10065 0 1 23160 value=-1 prim::ListConstruct pnnx_10066 3 1 23160 10298 10313 10343 prim::Constant pnnx_10068 0 1 10345 value=1.767767e-01 prim::Constant pnnx_10069 0 1 10346 value=trunc prim::Constant pnnx_10070 0 1 10347 value=6 prim::Constant pnnx_10071 0 1 10348 value=0 prim::Constant pnnx_10072 0 1 10349 value=1 prim::Constant pnnx_10073 0 1 10350 value=2 prim::Constant pnnx_10074 0 1 10351 value=3 prim::Constant pnnx_10075 0 1 10352 value=6 prim::Constant pnnx_10076 0 1 10353 value=4 prim::Constant pnnx_10077 0 1 10354 value=-2 prim::Constant pnnx_10078 0 1 10355 value=-1 prim::Constant pnnx_10079 0 1 10356 value=64 pnnx.Attribute layers_dfe.2.residual_group.blocks.1.attn 0 1 relative_position_bias_table.101 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.101=(225,6)f32 pnnx.Attribute layers_dfe.2.residual_group.blocks.1.attn 0 1 relative_position_index.101 
@relative_position_index=(64,64)i64 #relative_position_index.101=(64,64)i64 Tensor.view Tensor.view_1523 2 1 10340 10341 x_windows.101 $input=10340 $shape=10341 #10340=(1,6,6,8,8,192)f32 #x_windows.101=(36,8,8,192)f32 Tensor.view Tensor.view_1524 2 1 x_windows.101 10343 x8.29 $input=x_windows.101 $shape=10343 #x_windows.101=(36,8,8,192)f32 #x8.29=(36,64,192)f32 aten::size pnnx_10080 2 1 x8.29 10348 10364 #x8.29=(36,64,192)f32 prim::NumToTensor pnnx_10081 1 1 10364 B_.101 aten::Int pnnx_10082 1 1 B_.101 10366 aten::Int pnnx_10083 1 1 B_.101 10367 aten::size pnnx_10084 2 1 x8.29 10349 10368 #x8.29=(36,64,192)f32 prim::NumToTensor pnnx_10085 1 1 10368 N.101 aten::Int pnnx_10086 1 1 N.101 10370 aten::Int pnnx_10087 1 1 N.101 10371 aten::Int pnnx_10088 1 1 N.101 10372 aten::Int pnnx_10089 1 1 N.101 10373 aten::Int pnnx_10090 1 1 N.101 10374 aten::Int pnnx_10091 1 1 N.101 10375 aten::size pnnx_10092 2 1 x8.29 10350 10376 #x8.29=(36,64,192)f32 prim::NumToTensor pnnx_10093 1 1 10376 C.209 aten::Int pnnx_10094 1 1 C.209 10378 nn.Linear layers_dfe.2.residual_group.blocks.1.attn.qkv 1 1 x8.29 10379 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x8.29=(36,64,192)f32 #10379=(36,64,576)f32 aten::div pnnx_10095 3 1 C.209 10347 10346 10380 aten::Int pnnx_10096 1 1 10380 10381 prim::ListConstruct pnnx_10097 5 1 10367 10375 10351 10352 10381 10382 prim::Constant pnnx_10099 0 1 23161 value=2 prim::Constant pnnx_10100 0 1 23162 value=0 prim::Constant pnnx_10101 0 1 23163 value=3 prim::Constant pnnx_10102 0 1 23164 value=1 prim::ListConstruct pnnx_10103 5 1 23161 23162 23163 23164 10353 10384 Tensor.reshape Tensor.reshape_532 2 1 10379 10382 10383 $input=10379 $shape=10382 #10379=(36,64,576)f32 #10383=(36,64,3,6,32)f32 prim::Constant pnnx_10105 0 1 23165 value=0 prim::Constant pnnx_10106 0 1 23166 value=0 prim::Constant pnnx_10108 0 1 23167 value=0 prim::Constant pnnx_10109 0 1 23168 value=1 prim::Constant pnnx_10111 0 1 23169 value=0 prim::Constant pnnx_10112 0 1 23170 value=2 torch.permute torch.permute_2725 2 1 10383 10384 qkv1.29 $input=10383 $dims=10384 #10383=(36,64,3,6,32)f32 #qkv1.29=(3,36,6,64,32)f32 Tensor.select Tensor.select_797 3 1 qkv1.29 23165 23166 q.101 $input=qkv1.29 $dim=23165 $index=23166 #qkv1.29=(3,36,6,64,32)f32 #q.101=(36,6,64,32)f32 aten::mul pnnx_10114 2 1 q.101 10345 q1.29 #q.101=(36,6,64,32)f32 #q1.29=(36,6,64,32)f32 Tensor.select Tensor.select_798 3 1 qkv1.29 23167 23168 k.101 $input=qkv1.29 $dim=23167 $index=23168 #qkv1.29=(3,36,6,64,32)f32 #k.101=(36,6,64,32)f32 prim::Constant pnnx_10117 0 1 23171 value=-1 prim::ListConstruct pnnx_10118 1 1 23171 10392 Tensor.view Tensor.view_1525 2 1 relative_position_index.101 10392 10393 $input=relative_position_index.101 $shape=10392 #relative_position_index.101=(64,64)i64 #10393=(4096)i64 prim::ListConstruct pnnx_10120 1 1 10393 10394 #10393=(4096)i64 prim::Constant pnnx_10122 0 1 23172 value=64 prim::Constant pnnx_10123 0 1 23173 value=-1 prim::ListConstruct pnnx_10124 3 1 10356 23172 23173 10396 Tensor.index Tensor.index_375 2 1 relative_position_bias_table.101 10394 10395 $input=relative_position_bias_table.101 $expr=10394 #relative_position_bias_table.101=(225,6)f32 #10395=(4096,6)f32 prim::Constant pnnx_10126 0 1 23174 value=2 prim::Constant pnnx_10127 0 1 23175 value=0 prim::Constant pnnx_10128 0 1 23176 value=1 prim::ListConstruct pnnx_10129 3 1 23174 23175 23176 10398 Tensor.view Tensor.view_1526 2 1 10395 10396 relative_position_bias.101 $input=10395 $shape=10396 #10395=(4096,6)f32 
#relative_position_bias.101=(64,64,6)f32 prim::Constant pnnx_10133 0 1 23178 value=0 torch.permute torch.permute_2726 2 1 relative_position_bias.101 10398 10399 $input=relative_position_bias.101 $dims=10398 #relative_position_bias.101=(64,64,6)f32 #10399=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_150 1 1 10399 relative_position_bias1.29 memory_format=torch.contiguous_format $input=10399 #10399=(6,64,64)f32 #relative_position_bias1.29=(6,64,64)f32 prim::Constant pnnx_10135 0 1 23179 value=1 torch.transpose torch.transpose_3077 3 1 k.101 10354 10355 10390 $input=k.101 $dim0=10354 $dim1=10355 #k.101=(36,6,64,32)f32 #10390=(36,6,32,64)f32 torch.matmul torch.matmul_2302 2 1 q1.29 10390 attn.203 $input=q1.29 $other=10390 #q1.29=(36,6,64,32)f32 #10390=(36,6,32,64)f32 #attn.203=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3315 2 1 relative_position_bias1.29 23178 10401 $input=relative_position_bias1.29 $dim=23178 #relative_position_bias1.29=(6,64,64)f32 #10401=(1,6,64,64)f32 aten::add pnnx_10136 3 1 attn.203 10401 23179 attn2.15 #attn.203=(36,6,64,64)f32 #10401=(1,6,64,64)f32 #attn2.15=(36,6,64,64)f32 prim::Constant pnnx_10137 0 1 23180 value=0 aten::size pnnx_10138 2 1 attn_mask.51 23180 10403 #attn_mask.51=(36,64,64)f32 prim::NumToTensor pnnx_10139 1 1 10403 other.51 aten::Int pnnx_10140 1 1 other.51 10405 prim::Constant pnnx_10141 0 1 23181 value=trunc aten::div pnnx_10142 3 1 B_.101 other.51 23181 10406 aten::Int pnnx_10143 1 1 10406 10407 prim::Constant pnnx_10144 0 1 23182 value=6 prim::ListConstruct pnnx_10145 5 1 10407 10405 23182 10374 10373 10408 prim::Constant pnnx_10147 0 1 23183 value=1 prim::Constant pnnx_10149 0 1 23184 value=0 prim::Constant pnnx_10151 0 1 23185 value=1 Tensor.view Tensor.view_1527 2 1 attn2.15 10408 10409 $input=attn2.15 $shape=10408 #attn2.15=(36,6,64,64)f32 #10409=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3316 2 1 attn_mask.51 23183 10410 $input=attn_mask.51 $dim=23183 #attn_mask.51=(36,64,64)f32 #10410=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3317 2 1 10410 23184 10411 $input=10410 $dim=23184 #10410=(36,1,64,64)f32 #10411=(1,36,1,64,64)f32 aten::add pnnx_10152 3 1 10409 10411 23185 attn3.15 #10409=(1,36,6,64,64)f32 #10411=(1,36,1,64,64)f32 #attn3.15=(1,36,6,64,64)f32 prim::Constant pnnx_10153 0 1 23186 value=-1 prim::Constant pnnx_10154 0 1 23187 value=6 prim::ListConstruct pnnx_10155 4 1 23186 23187 10372 10371 10413 Tensor.view Tensor.view_1528 2 1 attn3.15 10413 input.227 $input=attn3.15 $shape=10413 #attn3.15=(1,36,6,64,64)f32 #input.227=(36,6,64,64)f32 nn.Softmax layers_dfe.2.residual_group.blocks.1.attn.softmax 1 1 input.227 10415 dim=-1 #input.227=(36,6,64,64)f32 #10415=(36,6,64,64)f32 nn.Dropout layers_dfe.2.residual_group.blocks.1.attn.attn_drop 1 1 10415 10416 #10415=(36,6,64,64)f32 #10416=(36,6,64,64)f32 Tensor.select Tensor.select_799 3 1 qkv1.29 23169 23170 v.101 $input=qkv1.29 $dim=23169 $index=23170 #qkv1.29=(3,36,6,64,32)f32 #v.101=(36,6,64,32)f32 prim::Constant pnnx_10158 0 1 23188 value=1 prim::Constant pnnx_10159 0 1 23189 value=2 torch.matmul torch.matmul_2303 2 1 10416 v.101 10417 $input=10416 $other=v.101 #10416=(36,6,64,64)f32 #v.101=(36,6,64,32)f32 #10417=(36,6,64,32)f32 prim::ListConstruct pnnx_10161 3 1 10366 10370 10378 10419 torch.transpose torch.transpose_3078 3 1 10417 23188 23189 10418 $input=10417 $dim0=23188 $dim1=23189 #10417=(36,6,64,32)f32 #10418=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_533 2 1 10418 10419 input1.31 $input=10418 $shape=10419 #10418=(36,64,6,32)f32 #input1.31=(36,64,192)f32 
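The attn.203 -> attn2.15 -> attn3.15 -> input.227 sequence above is the shifted-window attention of blocks.1: q is scaled by 1.767767e-01 (32**-0.5 for head_dim 32), the (6,64,64) relative position bias is added, and the per-window (36,64,64) attn_mask is broadcast in before the softmax. A hedged PyTorch sketch of that computation (dropout omitted; function and argument names are illustrative, not from the source):

import torch

def window_attention(q, k, v, rel_pos_bias, attn_mask=None, scale=32 ** -0.5):
    # q, k, v: (num_windows*B, heads, tokens, head_dim) = (36, 6, 64, 32) here
    attn = (q * scale) @ k.transpose(-2, -1)            # (36, 6, 64, 64)
    attn = attn + rel_pos_bias.unsqueeze(0)             # bias: (6, 64, 64)
    if attn_mask is not None:                           # mask: (36, 64, 64)
        nW = attn_mask.shape[0]
        attn = attn.view(-1, nW, 6, 64, 64) + attn_mask.unsqueeze(1).unsqueeze(0)
        attn = attn.view(-1, 6, 64, 64)
    attn = attn.softmax(dim=-1)
    # (36, 6, 64, 32) -> (36, 64, 6, 32) -> (36, 64, 192), as in reshape_533 above
    return (attn @ v).transpose(1, 2).reshape(attn.shape[0], 64, -1)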
nn.Linear layers_dfe.2.residual_group.blocks.1.attn.proj 1 1 input1.31 10421 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.31=(36,64,192)f32 #10421=(36,64,192)f32 nn.Dropout layers_dfe.2.residual_group.blocks.1.attn.proj_drop 1 1 10421 10422 #10421=(36,64,192)f32 #10422=(36,64,192)f32 prim::Constant pnnx_10163 0 1 23190 value=-1 prim::Constant pnnx_10164 0 1 23191 value=8 prim::Constant pnnx_10165 0 1 23192 value=8 prim::ListConstruct pnnx_10166 4 1 23190 23191 23192 10312 10423 prim::Constant pnnx_10168 0 1 23193 value=8 prim::Constant pnnx_10169 0 1 23194 value=trunc aten::div pnnx_10170 3 1 H0.1 23193 23194 10425 aten::Int pnnx_10171 1 1 10425 10426 prim::Constant pnnx_10172 0 1 23195 value=8 prim::Constant pnnx_10173 0 1 23196 value=trunc aten::div pnnx_10174 3 1 W0.1 23195 23196 10427 aten::Int pnnx_10175 1 1 10427 10428 prim::Constant pnnx_10176 0 1 23197 value=1 prim::Constant pnnx_10177 0 1 23198 value=8 prim::Constant pnnx_10178 0 1 23199 value=8 prim::Constant pnnx_10179 0 1 23200 value=-1 prim::ListConstruct pnnx_10180 6 1 23197 10426 10428 23198 23199 23200 10429 prim::Constant pnnx_10182 0 1 23201 value=0 prim::Constant pnnx_10183 0 1 23202 value=1 prim::Constant pnnx_10184 0 1 23203 value=3 prim::Constant pnnx_10185 0 1 23204 value=2 prim::Constant pnnx_10186 0 1 23205 value=4 prim::Constant pnnx_10187 0 1 23206 value=5 prim::ListConstruct pnnx_10188 6 1 23201 23202 23203 23204 23205 23206 10431 Tensor.view Tensor.view_1529 2 1 10422 10423 windows.101 $input=10422 $shape=10423 #10422=(36,64,192)f32 #windows.101=(36,8,8,192)f32 Tensor.view Tensor.view_1530 2 1 windows.101 10429 x9.29 $input=windows.101 $shape=10429 #windows.101=(36,8,8,192)f32 #x9.29=(1,6,6,8,8,192)f32 prim::Constant pnnx_10192 0 1 23208 value=1 prim::Constant pnnx_10193 0 1 23209 value=-1 prim::ListConstruct pnnx_10194 4 1 23208 995 1235 23209 10434 torch.permute torch.permute_2727 2 1 x9.29 10431 10432 $input=x9.29 $dims=10431 #x9.29=(1,6,6,8,8,192)f32 #10432=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_151 1 1 10432 10433 memory_format=torch.contiguous_format $input=10432 #10432=(1,6,8,6,8,192)f32 #10433=(1,6,8,6,8,192)f32 prim::Constant pnnx_10196 0 1 23210 value=4 prim::Constant pnnx_10197 0 1 23211 value=4 prim::ListConstruct pnnx_10198 2 1 23210 23211 10436 prim::Constant pnnx_10199 0 1 23212 value=1 prim::Constant pnnx_10200 0 1 23213 value=2 prim::ListConstruct pnnx_10201 2 1 23212 23213 10437 Tensor.view Tensor.view_1531 2 1 10433 10434 shifted_x.51 $input=10433 $shape=10434 #10433=(1,6,8,6,8,192)f32 #shifted_x.51=(1,48,48,192)f32 aten::mul pnnx_10203 2 1 H0.1 W0.1 10439 aten::Int pnnx_10204 1 1 10439 10440 prim::ListConstruct pnnx_10205 3 1 10307 10440 10311 10441 prim::Constant pnnx_10207 0 1 10443 value=None prim::Constant pnnx_10208 0 1 23214 value=1 torch.roll torch.roll_2469 3 1 shifted_x.51 10436 10437 x10.15 $input=shifted_x.51 $shifts=10436 $dims=10437 #shifted_x.51=(1,48,48,192)f32 #x10.15=(1,48,48,192)f32 Tensor.view Tensor.view_1532 2 1 x10.15 10441 x11.15 $input=x10.15 $shape=10441 #x10.15=(1,48,48,192)f32 #x11.15=(1,2304,192)f32 aten::add pnnx_10209 3 1 10286 x11.15 23214 input.229 #10286=(1,2304,192)f32 #x11.15=(1,2304,192)f32 #input.229=(1,2304,192)f32 nn.LayerNorm layers_dfe.2.residual_group.blocks.1.norm2 1 1 input.229 10445 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.229=(1,2304,192)f32 #10445=(1,2304,192)f32 nn.Linear layers_dfe.2.residual_group.blocks.1.mlp.fc1 1 1 10445 
10450 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #10445=(1,2304,192)f32 #10450=(1,2304,384)f32 nn.GELU layers_dfe.2.residual_group.blocks.1.mlp.act 1 1 10450 10451 #10450=(1,2304,384)f32 #10451=(1,2304,384)f32 nn.Dropout layers_dfe.2.residual_group.blocks.1.mlp.drop 1 1 10451 10452 #10451=(1,2304,384)f32 #10452=(1,2304,384)f32 nn.Linear layers_dfe.2.residual_group.blocks.1.mlp.fc2 1 1 10452 10453 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #10452=(1,2304,384)f32 #10453=(1,2304,192)f32 nn.Dropout layers_dfe.2.residual_group.blocks.1.mlp.drop 1 1 10453 10454 #10453=(1,2304,192)f32 #10454=(1,2304,192)f32 prim::Constant pnnx_10210 0 1 10455 value=None prim::Constant pnnx_10211 0 1 23215 value=1 aten::add pnnx_10212 3 1 input.229 10454 23215 10456 #input.229=(1,2304,192)f32 #10454=(1,2304,192)f32 #10456=(1,2304,192)f32 prim::Constant pnnx_10213 0 1 10457 value=trunc prim::Constant pnnx_10214 0 1 10458 value=8 prim::Constant pnnx_10215 0 1 10459 value=0 prim::Constant pnnx_10216 0 1 10460 value=2 prim::Constant pnnx_10217 0 1 10461 value=1 prim::Constant pnnx_10218 0 1 10462 value=3 prim::Constant pnnx_10219 0 1 10463 value=8 prim::Constant pnnx_10220 0 1 10464 value=4 prim::Constant pnnx_10221 0 1 10465 value=5 prim::Constant pnnx_10222 0 1 10466 value=-1 prim::Constant pnnx_10223 0 1 10467 value=64 aten::size pnnx_10224 2 1 10456 10459 10473 #10456=(1,2304,192)f32 prim::NumToTensor pnnx_10225 1 1 10473 B.123 aten::Int pnnx_10226 1 1 B.123 10475 aten::Int pnnx_10227 1 1 B.123 10476 aten::size pnnx_10228 2 1 10456 10460 10477 #10456=(1,2304,192)f32 prim::NumToTensor pnnx_10229 1 1 10477 C.211 aten::Int pnnx_10230 1 1 C.211 10479 aten::Int pnnx_10231 1 1 C.211 10480 aten::Int pnnx_10232 1 1 C.211 10481 aten::Int pnnx_10233 1 1 C.211 10482 nn.LayerNorm layers_dfe.2.residual_group.blocks.2.norm1 1 1 10456 10483 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #10456=(1,2304,192)f32 #10483=(1,2304,192)f32 prim::ListConstruct pnnx_10234 4 1 10476 992 1232 10482 10484 prim::Constant pnnx_10236 0 1 23216 value=0 Tensor.view Tensor.view_1533 2 1 10483 10484 x.103 $input=10483 $shape=10484 #10483=(1,2304,192)f32 #x.103=(1,48,48,192)f32 aten::size pnnx_10237 2 1 x.103 23216 10486 #x.103=(1,48,48,192)f32 prim::NumToTensor pnnx_10238 1 1 10486 B1.31 aten::Int pnnx_10239 1 1 B1.31 10488 aten::size pnnx_10240 2 1 x.103 10461 10489 #x.103=(1,48,48,192)f32 prim::NumToTensor pnnx_10241 1 1 10489 10490 prim::Constant pnnx_10242 0 1 23217 value=2 aten::size pnnx_10243 2 1 x.103 23217 10491 #x.103=(1,48,48,192)f32 prim::NumToTensor pnnx_10244 1 1 10491 10492 aten::size pnnx_10245 2 1 x.103 10462 10493 #x.103=(1,48,48,192)f32 prim::NumToTensor pnnx_10246 1 1 10493 C1.31 aten::Int pnnx_10247 1 1 C1.31 10495 aten::Int pnnx_10248 1 1 C1.31 10496 aten::div pnnx_10249 3 1 10490 10458 10457 10497 aten::Int pnnx_10250 1 1 10497 10498 prim::Constant pnnx_10251 0 1 23218 value=8 prim::Constant pnnx_10252 0 1 23219 value=trunc aten::div pnnx_10253 3 1 10492 23218 23219 10499 aten::Int pnnx_10254 1 1 10499 10500 prim::Constant pnnx_10255 0 1 23220 value=8 prim::ListConstruct pnnx_10256 6 1 10488 10498 10463 10500 23220 10496 10501 prim::Constant pnnx_10258 0 1 23221 value=0 prim::Constant pnnx_10259 0 1 23222 value=1 prim::Constant pnnx_10260 0 1 23223 value=3 prim::Constant pnnx_10261 0 1 23224 value=2 prim::ListConstruct pnnx_10262 6 1 23221 23222 23223 23224 10464 10465 10503 Tensor.view Tensor.view_1534 2 1 
x.103 10501 x5.53 $input=x.103 $shape=10501 #x.103=(1,48,48,192)f32 #x5.53=(1,6,8,6,8,192)f32 prim::Constant pnnx_10266 0 1 23226 value=8 prim::Constant pnnx_10267 0 1 23227 value=8 prim::ListConstruct pnnx_10268 4 1 10466 23226 23227 10495 10506 torch.permute torch.permute_2728 2 1 x5.53 10503 10504 $input=x5.53 $dims=10503 #x5.53=(1,6,8,6,8,192)f32 #10504=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_152 1 1 10504 10505 memory_format=torch.contiguous_format $input=10504 #10504=(1,6,6,8,8,192)f32 #10505=(1,6,6,8,8,192)f32 prim::Constant pnnx_10270 0 1 23228 value=-1 prim::ListConstruct pnnx_10271 3 1 23228 10467 10481 10508 prim::Constant pnnx_10273 0 1 10510 value=1.767767e-01 prim::Constant pnnx_10274 0 1 10511 value=trunc prim::Constant pnnx_10275 0 1 10512 value=6 prim::Constant pnnx_10276 0 1 10513 value=0 prim::Constant pnnx_10277 0 1 10514 value=1 prim::Constant pnnx_10278 0 1 10515 value=2 prim::Constant pnnx_10279 0 1 10516 value=3 prim::Constant pnnx_10280 0 1 10517 value=6 prim::Constant pnnx_10281 0 1 10518 value=4 prim::Constant pnnx_10282 0 1 10519 value=-2 prim::Constant pnnx_10283 0 1 10520 value=-1 prim::Constant pnnx_10284 0 1 10521 value=64 pnnx.Attribute layers_dfe.2.residual_group.blocks.2.attn 0 1 relative_position_bias_table.103 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.103=(225,6)f32 pnnx.Attribute layers_dfe.2.residual_group.blocks.2.attn 0 1 relative_position_index.103 @relative_position_index=(64,64)i64 #relative_position_index.103=(64,64)i64 Tensor.view Tensor.view_1535 2 1 10505 10506 x_windows.103 $input=10505 $shape=10506 #10505=(1,6,6,8,8,192)f32 #x_windows.103=(36,8,8,192)f32 Tensor.view Tensor.view_1536 2 1 x_windows.103 10508 x6.31 $input=x_windows.103 $shape=10508 #x_windows.103=(36,8,8,192)f32 #x6.31=(36,64,192)f32 aten::size pnnx_10285 2 1 x6.31 10513 10529 #x6.31=(36,64,192)f32 prim::NumToTensor pnnx_10286 1 1 10529 B_.103 aten::Int pnnx_10287 1 1 B_.103 10531 aten::Int pnnx_10288 1 1 B_.103 10532 aten::size pnnx_10289 2 1 x6.31 10514 10533 #x6.31=(36,64,192)f32 prim::NumToTensor pnnx_10290 1 1 10533 N.103 aten::Int pnnx_10291 1 1 N.103 10535 aten::Int pnnx_10292 1 1 N.103 10536 aten::size pnnx_10293 2 1 x6.31 10515 10537 #x6.31=(36,64,192)f32 prim::NumToTensor pnnx_10294 1 1 10537 C.213 aten::Int pnnx_10295 1 1 C.213 10539 nn.Linear layers_dfe.2.residual_group.blocks.2.attn.qkv 1 1 x6.31 10540 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x6.31=(36,64,192)f32 #10540=(36,64,576)f32 aten::div pnnx_10296 3 1 C.213 10512 10511 10541 aten::Int pnnx_10297 1 1 10541 10542 prim::ListConstruct pnnx_10298 5 1 10532 10536 10516 10517 10542 10543 prim::Constant pnnx_10300 0 1 23229 value=2 prim::Constant pnnx_10301 0 1 23230 value=0 prim::Constant pnnx_10302 0 1 23231 value=3 prim::Constant pnnx_10303 0 1 23232 value=1 prim::ListConstruct pnnx_10304 5 1 23229 23230 23231 23232 10518 10545 Tensor.reshape Tensor.reshape_534 2 1 10540 10543 10544 $input=10540 $shape=10543 #10540=(36,64,576)f32 #10544=(36,64,3,6,32)f32 prim::Constant pnnx_10306 0 1 23233 value=0 prim::Constant pnnx_10307 0 1 23234 value=0 prim::Constant pnnx_10309 0 1 23235 value=0 prim::Constant pnnx_10310 0 1 23236 value=1 prim::Constant pnnx_10312 0 1 23237 value=0 prim::Constant pnnx_10313 0 1 23238 value=2 torch.permute torch.permute_2729 2 1 10544 10545 qkv1.31 $input=10544 $dims=10545 #10544=(36,64,3,6,32)f32 #qkv1.31=(3,36,6,64,32)f32 Tensor.select Tensor.select_800 3 1 qkv1.31 23233 23234 q.103 $input=qkv1.31 
$dim=23233 $index=23234 #qkv1.31=(3,36,6,64,32)f32 #q.103=(36,6,64,32)f32 aten::mul pnnx_10315 2 1 q.103 10510 q1.31 #q.103=(36,6,64,32)f32 #q1.31=(36,6,64,32)f32 Tensor.select Tensor.select_801 3 1 qkv1.31 23235 23236 k.103 $input=qkv1.31 $dim=23235 $index=23236 #qkv1.31=(3,36,6,64,32)f32 #k.103=(36,6,64,32)f32 prim::Constant pnnx_10318 0 1 23239 value=-1 prim::ListConstruct pnnx_10319 1 1 23239 10553 Tensor.view Tensor.view_1537 2 1 relative_position_index.103 10553 10554 $input=relative_position_index.103 $shape=10553 #relative_position_index.103=(64,64)i64 #10554=(4096)i64 prim::ListConstruct pnnx_10321 1 1 10554 10555 #10554=(4096)i64 prim::Constant pnnx_10323 0 1 23240 value=64 prim::Constant pnnx_10324 0 1 23241 value=-1 prim::ListConstruct pnnx_10325 3 1 10521 23240 23241 10557 Tensor.index Tensor.index_376 2 1 relative_position_bias_table.103 10555 10556 $input=relative_position_bias_table.103 $expr=10555 #relative_position_bias_table.103=(225,6)f32 #10556=(4096,6)f32 prim::Constant pnnx_10327 0 1 23242 value=2 prim::Constant pnnx_10328 0 1 23243 value=0 prim::Constant pnnx_10329 0 1 23244 value=1 prim::ListConstruct pnnx_10330 3 1 23242 23243 23244 10559 Tensor.view Tensor.view_1538 2 1 10556 10557 relative_position_bias.103 $input=10556 $shape=10557 #10556=(4096,6)f32 #relative_position_bias.103=(64,64,6)f32 prim::Constant pnnx_10334 0 1 23246 value=0 torch.permute torch.permute_2730 2 1 relative_position_bias.103 10559 10560 $input=relative_position_bias.103 $dims=10559 #relative_position_bias.103=(64,64,6)f32 #10560=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_153 1 1 10560 relative_position_bias1.31 memory_format=torch.contiguous_format $input=10560 #10560=(6,64,64)f32 #relative_position_bias1.31=(6,64,64)f32 prim::Constant pnnx_10336 0 1 23247 value=1 torch.transpose torch.transpose_3079 3 1 k.103 10519 10520 10551 $input=k.103 $dim0=10519 $dim1=10520 #k.103=(36,6,64,32)f32 #10551=(36,6,32,64)f32 torch.matmul torch.matmul_2304 2 1 q1.31 10551 attn.207 $input=q1.31 $other=10551 #q1.31=(36,6,64,32)f32 #10551=(36,6,32,64)f32 #attn.207=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3318 2 1 relative_position_bias1.31 23246 10562 $input=relative_position_bias1.31 $dim=23246 #relative_position_bias1.31=(6,64,64)f32 #10562=(1,6,64,64)f32 aten::add pnnx_10337 3 1 attn.207 10562 23247 input.231 #attn.207=(36,6,64,64)f32 #10562=(1,6,64,64)f32 #input.231=(36,6,64,64)f32 nn.Softmax layers_dfe.2.residual_group.blocks.2.attn.softmax 1 1 input.231 10564 dim=-1 #input.231=(36,6,64,64)f32 #10564=(36,6,64,64)f32 nn.Dropout layers_dfe.2.residual_group.blocks.2.attn.attn_drop 1 1 10564 10565 #10564=(36,6,64,64)f32 #10565=(36,6,64,64)f32 Tensor.select Tensor.select_802 3 1 qkv1.31 23237 23238 v.103 $input=qkv1.31 $dim=23237 $index=23238 #qkv1.31=(3,36,6,64,32)f32 #v.103=(36,6,64,32)f32 prim::Constant pnnx_10339 0 1 23248 value=1 prim::Constant pnnx_10340 0 1 23249 value=2 torch.matmul torch.matmul_2305 2 1 10565 v.103 10566 $input=10565 $other=v.103 #10565=(36,6,64,64)f32 #v.103=(36,6,64,32)f32 #10566=(36,6,64,32)f32 prim::ListConstruct pnnx_10342 3 1 10531 10535 10539 10568 torch.transpose torch.transpose_3080 3 1 10566 23248 23249 10567 $input=10566 $dim0=23248 $dim1=23249 #10566=(36,6,64,32)f32 #10567=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_535 2 1 10567 10568 input1.33 $input=10567 $shape=10568 #10567=(36,64,6,32)f32 #input1.33=(36,64,192)f32 nn.Linear layers_dfe.2.residual_group.blocks.2.attn.proj 1 1 input1.33 10570 bias=True in_features=192 out_features=192 @bias=(192)f32 
@weight=(192,192)f32 #input1.33=(36,64,192)f32 #10570=(36,64,192)f32 nn.Dropout layers_dfe.2.residual_group.blocks.2.attn.proj_drop 1 1 10570 10571 #10570=(36,64,192)f32 #10571=(36,64,192)f32 prim::Constant pnnx_10344 0 1 23250 value=-1 prim::Constant pnnx_10345 0 1 23251 value=8 prim::Constant pnnx_10346 0 1 23252 value=8 prim::ListConstruct pnnx_10347 4 1 23250 23251 23252 10480 10572 prim::Constant pnnx_10349 0 1 23253 value=8 prim::Constant pnnx_10350 0 1 23254 value=trunc aten::div pnnx_10351 3 1 H0.1 23253 23254 10574 aten::Int pnnx_10352 1 1 10574 10575 prim::Constant pnnx_10353 0 1 23255 value=8 prim::Constant pnnx_10354 0 1 23256 value=trunc aten::div pnnx_10355 3 1 W0.1 23255 23256 10576 aten::Int pnnx_10356 1 1 10576 10577 prim::Constant pnnx_10357 0 1 23257 value=1 prim::Constant pnnx_10358 0 1 23258 value=8 prim::Constant pnnx_10359 0 1 23259 value=8 prim::Constant pnnx_10360 0 1 23260 value=-1 prim::ListConstruct pnnx_10361 6 1 23257 10575 10577 23258 23259 23260 10578 prim::Constant pnnx_10363 0 1 23261 value=0 prim::Constant pnnx_10364 0 1 23262 value=1 prim::Constant pnnx_10365 0 1 23263 value=3 prim::Constant pnnx_10366 0 1 23264 value=2 prim::Constant pnnx_10367 0 1 23265 value=4 prim::Constant pnnx_10368 0 1 23266 value=5 prim::ListConstruct pnnx_10369 6 1 23261 23262 23263 23264 23265 23266 10580 Tensor.view Tensor.view_1539 2 1 10571 10572 windows.103 $input=10571 $shape=10572 #10571=(36,64,192)f32 #windows.103=(36,8,8,192)f32 Tensor.view Tensor.view_1540 2 1 windows.103 10578 x7.31 $input=windows.103 $shape=10578 #windows.103=(36,8,8,192)f32 #x7.31=(1,6,6,8,8,192)f32 prim::Constant pnnx_10373 0 1 23268 value=1 prim::Constant pnnx_10374 0 1 23269 value=-1 prim::ListConstruct pnnx_10375 4 1 23268 989 1229 23269 10583 torch.permute torch.permute_2731 2 1 x7.31 10580 10581 $input=x7.31 $dims=10580 #x7.31=(1,6,6,8,8,192)f32 #10581=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_154 1 1 10581 10582 memory_format=torch.contiguous_format $input=10581 #10581=(1,6,8,6,8,192)f32 #10582=(1,6,8,6,8,192)f32 aten::mul pnnx_10377 2 1 H0.1 W0.1 10585 aten::Int pnnx_10378 1 1 10585 10586 prim::ListConstruct pnnx_10379 3 1 10475 10586 10479 10587 prim::Constant pnnx_10381 0 1 10589 value=None prim::Constant pnnx_10382 0 1 23270 value=1 Tensor.view Tensor.view_1541 2 1 10582 10583 x8.31 $input=10582 $shape=10583 #10582=(1,6,8,6,8,192)f32 #x8.31=(1,48,48,192)f32 Tensor.view Tensor.view_1542 2 1 x8.31 10587 x9.31 $input=x8.31 $shape=10587 #x8.31=(1,48,48,192)f32 #x9.31=(1,2304,192)f32 aten::add pnnx_10383 3 1 10456 x9.31 23270 input.233 #10456=(1,2304,192)f32 #x9.31=(1,2304,192)f32 #input.233=(1,2304,192)f32 nn.LayerNorm layers_dfe.2.residual_group.blocks.2.norm2 1 1 input.233 10591 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.233=(1,2304,192)f32 #10591=(1,2304,192)f32 nn.Linear layers_dfe.2.residual_group.blocks.2.mlp.fc1 1 1 10591 10596 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #10591=(1,2304,192)f32 #10596=(1,2304,384)f32 nn.GELU layers_dfe.2.residual_group.blocks.2.mlp.act 1 1 10596 10597 #10596=(1,2304,384)f32 #10597=(1,2304,384)f32 nn.Dropout layers_dfe.2.residual_group.blocks.2.mlp.drop 1 1 10597 10598 #10597=(1,2304,384)f32 #10598=(1,2304,384)f32 nn.Linear layers_dfe.2.residual_group.blocks.2.mlp.fc2 1 1 10598 10599 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #10598=(1,2304,384)f32 #10599=(1,2304,192)f32 nn.Dropout 
layers_dfe.2.residual_group.blocks.2.mlp.drop 1 1 10599 10600 #10599=(1,2304,192)f32 #10600=(1,2304,192)f32 prim::Constant pnnx_10384 0 1 10601 value=None prim::Constant pnnx_10385 0 1 23271 value=1 aten::add pnnx_10386 3 1 input.233 10600 23271 10602 #input.233=(1,2304,192)f32 #10600=(1,2304,192)f32 #10602=(1,2304,192)f32 prim::Constant pnnx_10387 0 1 10603 value=trunc prim::Constant pnnx_10388 0 1 10604 value=8 prim::Constant pnnx_10389 0 1 10605 value=0 prim::Constant pnnx_10390 0 1 10606 value=2 prim::Constant pnnx_10391 0 1 10607 value=-4 prim::Constant pnnx_10392 0 1 10608 value=1 prim::Constant pnnx_10393 0 1 10609 value=3 prim::Constant pnnx_10394 0 1 10610 value=8 prim::Constant pnnx_10395 0 1 10611 value=4 prim::Constant pnnx_10396 0 1 10612 value=5 prim::Constant pnnx_10397 0 1 10613 value=-1 prim::Constant pnnx_10398 0 1 10614 value=64 pnnx.Attribute layers_dfe.2.residual_group.blocks.3 0 1 attn_mask.53 @attn_mask=(36,64,64)f32 #attn_mask.53=(36,64,64)f32 aten::size pnnx_10399 2 1 10602 10605 10621 #10602=(1,2304,192)f32 prim::NumToTensor pnnx_10400 1 1 10621 B.125 aten::Int pnnx_10401 1 1 B.125 10623 aten::Int pnnx_10402 1 1 B.125 10624 aten::size pnnx_10403 2 1 10602 10606 10625 #10602=(1,2304,192)f32 prim::NumToTensor pnnx_10404 1 1 10625 C.215 aten::Int pnnx_10405 1 1 C.215 10627 aten::Int pnnx_10406 1 1 C.215 10628 aten::Int pnnx_10407 1 1 C.215 10629 aten::Int pnnx_10408 1 1 C.215 10630 nn.LayerNorm layers_dfe.2.residual_group.blocks.3.norm1 1 1 10602 10631 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #10602=(1,2304,192)f32 #10631=(1,2304,192)f32 prim::ListConstruct pnnx_10409 4 1 10624 986 1226 10630 10632 prim::Constant pnnx_10411 0 1 23272 value=-4 prim::ListConstruct pnnx_10412 2 1 10607 23272 10634 prim::Constant pnnx_10413 0 1 23273 value=2 prim::ListConstruct pnnx_10414 2 1 10608 23273 10635 Tensor.view Tensor.view_1543 2 1 10631 10632 x.105 $input=10631 $shape=10632 #10631=(1,2304,192)f32 #x.105=(1,48,48,192)f32 prim::Constant pnnx_10416 0 1 23274 value=0 torch.roll torch.roll_2470 3 1 x.105 10634 10635 x6.33 $input=x.105 $shifts=10634 $dims=10635 #x.105=(1,48,48,192)f32 #x6.33=(1,48,48,192)f32 aten::size pnnx_10417 2 1 x6.33 23274 10637 #x6.33=(1,48,48,192)f32 prim::NumToTensor pnnx_10418 1 1 10637 B1.33 aten::Int pnnx_10419 1 1 B1.33 10639 prim::Constant pnnx_10420 0 1 23275 value=1 aten::size pnnx_10421 2 1 x6.33 23275 10640 #x6.33=(1,48,48,192)f32 prim::NumToTensor pnnx_10422 1 1 10640 10641 prim::Constant pnnx_10423 0 1 23276 value=2 aten::size pnnx_10424 2 1 x6.33 23276 10642 #x6.33=(1,48,48,192)f32 prim::NumToTensor pnnx_10425 1 1 10642 10643 aten::size pnnx_10426 2 1 x6.33 10609 10644 #x6.33=(1,48,48,192)f32 prim::NumToTensor pnnx_10427 1 1 10644 C1.33 aten::Int pnnx_10428 1 1 C1.33 10646 aten::Int pnnx_10429 1 1 C1.33 10647 aten::div pnnx_10430 3 1 10641 10604 10603 10648 aten::Int pnnx_10431 1 1 10648 10649 prim::Constant pnnx_10432 0 1 23277 value=8 prim::Constant pnnx_10433 0 1 23278 value=trunc aten::div pnnx_10434 3 1 10643 23277 23278 10650 aten::Int pnnx_10435 1 1 10650 10651 prim::Constant pnnx_10436 0 1 23279 value=8 prim::ListConstruct pnnx_10437 6 1 10639 10649 10610 10651 23279 10647 10652 prim::Constant pnnx_10439 0 1 23280 value=0 prim::Constant pnnx_10440 0 1 23281 value=1 prim::Constant pnnx_10441 0 1 23282 value=3 prim::Constant pnnx_10442 0 1 23283 value=2 prim::ListConstruct pnnx_10443 6 1 23280 23281 23282 23283 10611 10612 10654 Tensor.view Tensor.view_1544 2 1 x6.33 10652 x7.33 
$input=x6.33 $shape=10652 #x6.33=(1,48,48,192)f32 #x7.33=(1,6,8,6,8,192)f32 prim::Constant pnnx_10447 0 1 23285 value=8 prim::Constant pnnx_10448 0 1 23286 value=8 prim::ListConstruct pnnx_10449 4 1 10613 23285 23286 10646 10657 torch.permute torch.permute_2732 2 1 x7.33 10654 10655 $input=x7.33 $dims=10654 #x7.33=(1,6,8,6,8,192)f32 #10655=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_155 1 1 10655 10656 memory_format=torch.contiguous_format $input=10655 #10655=(1,6,6,8,8,192)f32 #10656=(1,6,6,8,8,192)f32 prim::Constant pnnx_10451 0 1 23287 value=-1 prim::ListConstruct pnnx_10452 3 1 23287 10614 10629 10659 prim::Constant pnnx_10454 0 1 10661 value=1.767767e-01 prim::Constant pnnx_10455 0 1 10662 value=trunc prim::Constant pnnx_10456 0 1 10663 value=6 prim::Constant pnnx_10457 0 1 10664 value=0 prim::Constant pnnx_10458 0 1 10665 value=1 prim::Constant pnnx_10459 0 1 10666 value=2 prim::Constant pnnx_10460 0 1 10667 value=3 prim::Constant pnnx_10461 0 1 10668 value=6 prim::Constant pnnx_10462 0 1 10669 value=4 prim::Constant pnnx_10463 0 1 10670 value=-2 prim::Constant pnnx_10464 0 1 10671 value=-1 prim::Constant pnnx_10465 0 1 10672 value=64 pnnx.Attribute layers_dfe.2.residual_group.blocks.3.attn 0 1 relative_position_bias_table.105 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.105=(225,6)f32 pnnx.Attribute layers_dfe.2.residual_group.blocks.3.attn 0 1 relative_position_index.105 @relative_position_index=(64,64)i64 #relative_position_index.105=(64,64)i64 Tensor.view Tensor.view_1545 2 1 10656 10657 x_windows.105 $input=10656 $shape=10657 #10656=(1,6,6,8,8,192)f32 #x_windows.105=(36,8,8,192)f32 Tensor.view Tensor.view_1546 2 1 x_windows.105 10659 x8.33 $input=x_windows.105 $shape=10659 #x_windows.105=(36,8,8,192)f32 #x8.33=(36,64,192)f32 aten::size pnnx_10466 2 1 x8.33 10664 10680 #x8.33=(36,64,192)f32 prim::NumToTensor pnnx_10467 1 1 10680 B_.105 aten::Int pnnx_10468 1 1 B_.105 10682 aten::Int pnnx_10469 1 1 B_.105 10683 aten::size pnnx_10470 2 1 x8.33 10665 10684 #x8.33=(36,64,192)f32 prim::NumToTensor pnnx_10471 1 1 10684 N.105 aten::Int pnnx_10472 1 1 N.105 10686 aten::Int pnnx_10473 1 1 N.105 10687 aten::Int pnnx_10474 1 1 N.105 10688 aten::Int pnnx_10475 1 1 N.105 10689 aten::Int pnnx_10476 1 1 N.105 10690 aten::Int pnnx_10477 1 1 N.105 10691 aten::size pnnx_10478 2 1 x8.33 10666 10692 #x8.33=(36,64,192)f32 prim::NumToTensor pnnx_10479 1 1 10692 C.217 aten::Int pnnx_10480 1 1 C.217 10694 nn.Linear layers_dfe.2.residual_group.blocks.3.attn.qkv 1 1 x8.33 10695 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x8.33=(36,64,192)f32 #10695=(36,64,576)f32 aten::div pnnx_10481 3 1 C.217 10663 10662 10696 aten::Int pnnx_10482 1 1 10696 10697 prim::ListConstruct pnnx_10483 5 1 10683 10691 10667 10668 10697 10698 prim::Constant pnnx_10485 0 1 23288 value=2 prim::Constant pnnx_10486 0 1 23289 value=0 prim::Constant pnnx_10487 0 1 23290 value=3 prim::Constant pnnx_10488 0 1 23291 value=1 prim::ListConstruct pnnx_10489 5 1 23288 23289 23290 23291 10669 10700 Tensor.reshape Tensor.reshape_536 2 1 10695 10698 10699 $input=10695 $shape=10698 #10695=(36,64,576)f32 #10699=(36,64,3,6,32)f32 prim::Constant pnnx_10491 0 1 23292 value=0 prim::Constant pnnx_10492 0 1 23293 value=0 prim::Constant pnnx_10494 0 1 23294 value=0 prim::Constant pnnx_10495 0 1 23295 value=1 prim::Constant pnnx_10497 0 1 23296 value=0 prim::Constant pnnx_10498 0 1 23297 value=2 torch.permute torch.permute_2733 2 1 10699 10700 qkv1.33 $input=10699 $dims=10700 
#10699=(36,64,3,6,32)f32 #qkv1.33=(3,36,6,64,32)f32 Tensor.select Tensor.select_803 3 1 qkv1.33 23292 23293 q.105 $input=qkv1.33 $dim=23292 $index=23293 #qkv1.33=(3,36,6,64,32)f32 #q.105=(36,6,64,32)f32 aten::mul pnnx_10500 2 1 q.105 10661 q1.33 #q.105=(36,6,64,32)f32 #q1.33=(36,6,64,32)f32 Tensor.select Tensor.select_804 3 1 qkv1.33 23294 23295 k.105 $input=qkv1.33 $dim=23294 $index=23295 #qkv1.33=(3,36,6,64,32)f32 #k.105=(36,6,64,32)f32 prim::Constant pnnx_10503 0 1 23298 value=-1 prim::ListConstruct pnnx_10504 1 1 23298 10708 Tensor.view Tensor.view_1547 2 1 relative_position_index.105 10708 10709 $input=relative_position_index.105 $shape=10708 #relative_position_index.105=(64,64)i64 #10709=(4096)i64 prim::ListConstruct pnnx_10506 1 1 10709 10710 #10709=(4096)i64 prim::Constant pnnx_10508 0 1 23299 value=64 prim::Constant pnnx_10509 0 1 23300 value=-1 prim::ListConstruct pnnx_10510 3 1 10672 23299 23300 10712 Tensor.index Tensor.index_377 2 1 relative_position_bias_table.105 10710 10711 $input=relative_position_bias_table.105 $expr=10710 #relative_position_bias_table.105=(225,6)f32 #10711=(4096,6)f32 prim::Constant pnnx_10512 0 1 23301 value=2 prim::Constant pnnx_10513 0 1 23302 value=0 prim::Constant pnnx_10514 0 1 23303 value=1 prim::ListConstruct pnnx_10515 3 1 23301 23302 23303 10714 Tensor.view Tensor.view_1548 2 1 10711 10712 relative_position_bias.105 $input=10711 $shape=10712 #10711=(4096,6)f32 #relative_position_bias.105=(64,64,6)f32 prim::Constant pnnx_10519 0 1 23305 value=0 torch.permute torch.permute_2734 2 1 relative_position_bias.105 10714 10715 $input=relative_position_bias.105 $dims=10714 #relative_position_bias.105=(64,64,6)f32 #10715=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_156 1 1 10715 relative_position_bias1.33 memory_format=torch.contiguous_format $input=10715 #10715=(6,64,64)f32 #relative_position_bias1.33=(6,64,64)f32 prim::Constant pnnx_10521 0 1 23306 value=1 torch.transpose torch.transpose_3081 3 1 k.105 10670 10671 10706 $input=k.105 $dim0=10670 $dim1=10671 #k.105=(36,6,64,32)f32 #10706=(36,6,32,64)f32 torch.matmul torch.matmul_2306 2 1 q1.33 10706 attn.211 $input=q1.33 $other=10706 #q1.33=(36,6,64,32)f32 #10706=(36,6,32,64)f32 #attn.211=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3319 2 1 relative_position_bias1.33 23305 10717 $input=relative_position_bias1.33 $dim=23305 #relative_position_bias1.33=(6,64,64)f32 #10717=(1,6,64,64)f32 aten::add pnnx_10522 3 1 attn.211 10717 23306 attn2.17 #attn.211=(36,6,64,64)f32 #10717=(1,6,64,64)f32 #attn2.17=(36,6,64,64)f32 prim::Constant pnnx_10523 0 1 23307 value=0 aten::size pnnx_10524 2 1 attn_mask.53 23307 10719 #attn_mask.53=(36,64,64)f32 prim::NumToTensor pnnx_10525 1 1 10719 other.53 aten::Int pnnx_10526 1 1 other.53 10721 prim::Constant pnnx_10527 0 1 23308 value=trunc aten::div pnnx_10528 3 1 B_.105 other.53 23308 10722 aten::Int pnnx_10529 1 1 10722 10723 prim::Constant pnnx_10530 0 1 23309 value=6 prim::ListConstruct pnnx_10531 5 1 10723 10721 23309 10690 10689 10724 prim::Constant pnnx_10533 0 1 23310 value=1 prim::Constant pnnx_10535 0 1 23311 value=0 prim::Constant pnnx_10537 0 1 23312 value=1 Tensor.view Tensor.view_1549 2 1 attn2.17 10724 10725 $input=attn2.17 $shape=10724 #attn2.17=(36,6,64,64)f32 #10725=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3320 2 1 attn_mask.53 23310 10726 $input=attn_mask.53 $dim=23310 #attn_mask.53=(36,64,64)f32 #10726=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3321 2 1 10726 23311 10727 $input=10726 $dim=23311 #10726=(36,1,64,64)f32 
#10727=(1,36,1,64,64)f32 aten::add pnnx_10538 3 1 10725 10727 23312 attn3.17 #10725=(1,36,6,64,64)f32 #10727=(1,36,1,64,64)f32 #attn3.17=(1,36,6,64,64)f32 prim::Constant pnnx_10539 0 1 23313 value=-1 prim::Constant pnnx_10540 0 1 23314 value=6 prim::ListConstruct pnnx_10541 4 1 23313 23314 10688 10687 10729 Tensor.view Tensor.view_1550 2 1 attn3.17 10729 input.235 $input=attn3.17 $shape=10729 #attn3.17=(1,36,6,64,64)f32 #input.235=(36,6,64,64)f32 nn.Softmax layers_dfe.2.residual_group.blocks.3.attn.softmax 1 1 input.235 10731 dim=-1 #input.235=(36,6,64,64)f32 #10731=(36,6,64,64)f32 nn.Dropout layers_dfe.2.residual_group.blocks.3.attn.attn_drop 1 1 10731 10732 #10731=(36,6,64,64)f32 #10732=(36,6,64,64)f32 Tensor.select Tensor.select_805 3 1 qkv1.33 23296 23297 v.105 $input=qkv1.33 $dim=23296 $index=23297 #qkv1.33=(3,36,6,64,32)f32 #v.105=(36,6,64,32)f32 prim::Constant pnnx_10544 0 1 23315 value=1 prim::Constant pnnx_10545 0 1 23316 value=2 torch.matmul torch.matmul_2307 2 1 10732 v.105 10733 $input=10732 $other=v.105 #10732=(36,6,64,64)f32 #v.105=(36,6,64,32)f32 #10733=(36,6,64,32)f32 prim::ListConstruct pnnx_10547 3 1 10682 10686 10694 10735 torch.transpose torch.transpose_3082 3 1 10733 23315 23316 10734 $input=10733 $dim0=23315 $dim1=23316 #10733=(36,6,64,32)f32 #10734=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_537 2 1 10734 10735 input1.35 $input=10734 $shape=10735 #10734=(36,64,6,32)f32 #input1.35=(36,64,192)f32 nn.Linear layers_dfe.2.residual_group.blocks.3.attn.proj 1 1 input1.35 10737 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.35=(36,64,192)f32 #10737=(36,64,192)f32 nn.Dropout layers_dfe.2.residual_group.blocks.3.attn.proj_drop 1 1 10737 10738 #10737=(36,64,192)f32 #10738=(36,64,192)f32 prim::Constant pnnx_10549 0 1 23317 value=-1 prim::Constant pnnx_10550 0 1 23318 value=8 prim::Constant pnnx_10551 0 1 23319 value=8 prim::ListConstruct pnnx_10552 4 1 23317 23318 23319 10628 10739 prim::Constant pnnx_10554 0 1 23320 value=8 prim::Constant pnnx_10555 0 1 23321 value=trunc aten::div pnnx_10556 3 1 H0.1 23320 23321 10741 aten::Int pnnx_10557 1 1 10741 10742 prim::Constant pnnx_10558 0 1 23322 value=8 prim::Constant pnnx_10559 0 1 23323 value=trunc aten::div pnnx_10560 3 1 W0.1 23322 23323 10743 aten::Int pnnx_10561 1 1 10743 10744 prim::Constant pnnx_10562 0 1 23324 value=1 prim::Constant pnnx_10563 0 1 23325 value=8 prim::Constant pnnx_10564 0 1 23326 value=8 prim::Constant pnnx_10565 0 1 23327 value=-1 prim::ListConstruct pnnx_10566 6 1 23324 10742 10744 23325 23326 23327 10745 prim::Constant pnnx_10568 0 1 23328 value=0 prim::Constant pnnx_10569 0 1 23329 value=1 prim::Constant pnnx_10570 0 1 23330 value=3 prim::Constant pnnx_10571 0 1 23331 value=2 prim::Constant pnnx_10572 0 1 23332 value=4 prim::Constant pnnx_10573 0 1 23333 value=5 prim::ListConstruct pnnx_10574 6 1 23328 23329 23330 23331 23332 23333 10747 Tensor.view Tensor.view_1551 2 1 10738 10739 windows.105 $input=10738 $shape=10739 #10738=(36,64,192)f32 #windows.105=(36,8,8,192)f32 Tensor.view Tensor.view_1552 2 1 windows.105 10745 x9.33 $input=windows.105 $shape=10745 #windows.105=(36,8,8,192)f32 #x9.33=(1,6,6,8,8,192)f32 prim::Constant pnnx_10578 0 1 23335 value=1 prim::Constant pnnx_10579 0 1 23336 value=-1 prim::ListConstruct pnnx_10580 4 1 23335 983 1223 23336 10750 torch.permute torch.permute_2735 2 1 x9.33 10747 10748 $input=x9.33 $dims=10747 #x9.33=(1,6,6,8,8,192)f32 #10748=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_157 1 1 10748 10749 
memory_format=torch.contiguous_format $input=10748 #10748=(1,6,8,6,8,192)f32 #10749=(1,6,8,6,8,192)f32 prim::Constant pnnx_10582 0 1 23337 value=4 prim::Constant pnnx_10583 0 1 23338 value=4 prim::ListConstruct pnnx_10584 2 1 23337 23338 10752 prim::Constant pnnx_10585 0 1 23339 value=1 prim::Constant pnnx_10586 0 1 23340 value=2 prim::ListConstruct pnnx_10587 2 1 23339 23340 10753 Tensor.view Tensor.view_1553 2 1 10749 10750 shifted_x.53 $input=10749 $shape=10750 #10749=(1,6,8,6,8,192)f32 #shifted_x.53=(1,48,48,192)f32 aten::mul pnnx_10589 2 1 H0.1 W0.1 10755 aten::Int pnnx_10590 1 1 10755 10756 prim::ListConstruct pnnx_10591 3 1 10623 10756 10627 10757 prim::Constant pnnx_10593 0 1 10759 value=None prim::Constant pnnx_10594 0 1 23341 value=1 torch.roll torch.roll_2471 3 1 shifted_x.53 10752 10753 x10.17 $input=shifted_x.53 $shifts=10752 $dims=10753 #shifted_x.53=(1,48,48,192)f32 #x10.17=(1,48,48,192)f32 Tensor.view Tensor.view_1554 2 1 x10.17 10757 x11.17 $input=x10.17 $shape=10757 #x10.17=(1,48,48,192)f32 #x11.17=(1,2304,192)f32 aten::add pnnx_10595 3 1 10602 x11.17 23341 input.237 #10602=(1,2304,192)f32 #x11.17=(1,2304,192)f32 #input.237=(1,2304,192)f32 nn.LayerNorm layers_dfe.2.residual_group.blocks.3.norm2 1 1 input.237 10761 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.237=(1,2304,192)f32 #10761=(1,2304,192)f32 nn.Linear layers_dfe.2.residual_group.blocks.3.mlp.fc1 1 1 10761 10766 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #10761=(1,2304,192)f32 #10766=(1,2304,384)f32 nn.GELU layers_dfe.2.residual_group.blocks.3.mlp.act 1 1 10766 10767 #10766=(1,2304,384)f32 #10767=(1,2304,384)f32 nn.Dropout layers_dfe.2.residual_group.blocks.3.mlp.drop 1 1 10767 10768 #10767=(1,2304,384)f32 #10768=(1,2304,384)f32 nn.Linear layers_dfe.2.residual_group.blocks.3.mlp.fc2 1 1 10768 10769 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #10768=(1,2304,384)f32 #10769=(1,2304,192)f32 nn.Dropout layers_dfe.2.residual_group.blocks.3.mlp.drop 1 1 10769 10770 #10769=(1,2304,192)f32 #10770=(1,2304,192)f32 prim::Constant pnnx_10596 0 1 10771 value=None prim::Constant pnnx_10597 0 1 23342 value=1 aten::add pnnx_10598 3 1 input.237 10770 23342 10772 #input.237=(1,2304,192)f32 #10770=(1,2304,192)f32 #10772=(1,2304,192)f32 prim::Constant pnnx_10599 0 1 10773 value=trunc prim::Constant pnnx_10600 0 1 10774 value=8 prim::Constant pnnx_10601 0 1 10775 value=0 prim::Constant pnnx_10602 0 1 10776 value=2 prim::Constant pnnx_10603 0 1 10777 value=1 prim::Constant pnnx_10604 0 1 10778 value=3 prim::Constant pnnx_10605 0 1 10779 value=8 prim::Constant pnnx_10606 0 1 10780 value=4 prim::Constant pnnx_10607 0 1 10781 value=5 prim::Constant pnnx_10608 0 1 10782 value=-1 prim::Constant pnnx_10609 0 1 10783 value=64 aten::size pnnx_10610 2 1 10772 10775 10789 #10772=(1,2304,192)f32 prim::NumToTensor pnnx_10611 1 1 10789 B.127 aten::Int pnnx_10612 1 1 B.127 10791 aten::Int pnnx_10613 1 1 B.127 10792 aten::size pnnx_10614 2 1 10772 10776 10793 #10772=(1,2304,192)f32 prim::NumToTensor pnnx_10615 1 1 10793 C.219 aten::Int pnnx_10616 1 1 C.219 10795 aten::Int pnnx_10617 1 1 C.219 10796 aten::Int pnnx_10618 1 1 C.219 10797 aten::Int pnnx_10619 1 1 C.219 10798 nn.LayerNorm layers_dfe.2.residual_group.blocks.4.norm1 1 1 10772 10799 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #10772=(1,2304,192)f32 #10799=(1,2304,192)f32 prim::ListConstruct pnnx_10620 4 1 10792 980 
1220 10798 10800 prim::Constant pnnx_10622 0 1 23343 value=0 Tensor.view Tensor.view_1555 2 1 10799 10800 x.107 $input=10799 $shape=10800 #10799=(1,2304,192)f32 #x.107=(1,48,48,192)f32 aten::size pnnx_10623 2 1 x.107 23343 10802 #x.107=(1,48,48,192)f32 prim::NumToTensor pnnx_10624 1 1 10802 B1.35 aten::Int pnnx_10625 1 1 B1.35 10804 aten::size pnnx_10626 2 1 x.107 10777 10805 #x.107=(1,48,48,192)f32 prim::NumToTensor pnnx_10627 1 1 10805 10806 prim::Constant pnnx_10628 0 1 23344 value=2 aten::size pnnx_10629 2 1 x.107 23344 10807 #x.107=(1,48,48,192)f32 prim::NumToTensor pnnx_10630 1 1 10807 10808 aten::size pnnx_10631 2 1 x.107 10778 10809 #x.107=(1,48,48,192)f32 prim::NumToTensor pnnx_10632 1 1 10809 C1.35 aten::Int pnnx_10633 1 1 C1.35 10811 aten::Int pnnx_10634 1 1 C1.35 10812 aten::div pnnx_10635 3 1 10806 10774 10773 10813 aten::Int pnnx_10636 1 1 10813 10814 prim::Constant pnnx_10637 0 1 23345 value=8 prim::Constant pnnx_10638 0 1 23346 value=trunc aten::div pnnx_10639 3 1 10808 23345 23346 10815 aten::Int pnnx_10640 1 1 10815 10816 prim::Constant pnnx_10641 0 1 23347 value=8 prim::ListConstruct pnnx_10642 6 1 10804 10814 10779 10816 23347 10812 10817 prim::Constant pnnx_10644 0 1 23348 value=0 prim::Constant pnnx_10645 0 1 23349 value=1 prim::Constant pnnx_10646 0 1 23350 value=3 prim::Constant pnnx_10647 0 1 23351 value=2 prim::ListConstruct pnnx_10648 6 1 23348 23349 23350 23351 10780 10781 10819 Tensor.view Tensor.view_1556 2 1 x.107 10817 x5.55 $input=x.107 $shape=10817 #x.107=(1,48,48,192)f32 #x5.55=(1,6,8,6,8,192)f32 prim::Constant pnnx_10652 0 1 23353 value=8 prim::Constant pnnx_10653 0 1 23354 value=8 prim::ListConstruct pnnx_10654 4 1 10782 23353 23354 10811 10822 torch.permute torch.permute_2736 2 1 x5.55 10819 10820 $input=x5.55 $dims=10819 #x5.55=(1,6,8,6,8,192)f32 #10820=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_158 1 1 10820 10821 memory_format=torch.contiguous_format $input=10820 #10820=(1,6,6,8,8,192)f32 #10821=(1,6,6,8,8,192)f32 prim::Constant pnnx_10656 0 1 23355 value=-1 prim::ListConstruct pnnx_10657 3 1 23355 10783 10797 10824 prim::Constant pnnx_10659 0 1 10826 value=1.767767e-01 prim::Constant pnnx_10660 0 1 10827 value=trunc prim::Constant pnnx_10661 0 1 10828 value=6 prim::Constant pnnx_10662 0 1 10829 value=0 prim::Constant pnnx_10663 0 1 10830 value=1 prim::Constant pnnx_10664 0 1 10831 value=2 prim::Constant pnnx_10665 0 1 10832 value=3 prim::Constant pnnx_10666 0 1 10833 value=6 prim::Constant pnnx_10667 0 1 10834 value=4 prim::Constant pnnx_10668 0 1 10835 value=-2 prim::Constant pnnx_10669 0 1 10836 value=-1 prim::Constant pnnx_10670 0 1 10837 value=64 pnnx.Attribute layers_dfe.2.residual_group.blocks.4.attn 0 1 relative_position_bias_table.107 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.107=(225,6)f32 pnnx.Attribute layers_dfe.2.residual_group.blocks.4.attn 0 1 relative_position_index.107 @relative_position_index=(64,64)i64 #relative_position_index.107=(64,64)i64 Tensor.view Tensor.view_1557 2 1 10821 10822 x_windows.107 $input=10821 $shape=10822 #10821=(1,6,6,8,8,192)f32 #x_windows.107=(36,8,8,192)f32 Tensor.view Tensor.view_1558 2 1 x_windows.107 10824 x6.35 $input=x_windows.107 $shape=10824 #x_windows.107=(36,8,8,192)f32 #x6.35=(36,64,192)f32 aten::size pnnx_10671 2 1 x6.35 10829 10845 #x6.35=(36,64,192)f32 prim::NumToTensor pnnx_10672 1 1 10845 B_.107 aten::Int pnnx_10673 1 1 B_.107 10847 aten::Int pnnx_10674 1 1 B_.107 10848 aten::size pnnx_10675 2 1 x6.35 10830 10849 #x6.35=(36,64,192)f32 
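Each block above closes with the same tail: the (36,8,8,192) windows are merged back to (1,48,48,192) (Tensor.view -> torch.permute -> Tensor.contiguous -> Tensor.view, plus a torch.roll to undo the shift in the shifted blocks), added to the block input, and passed through norm2 -> mlp.fc1 -> GELU -> mlp.fc2 with a second residual add. A hedged sketch of that tail in plain PyTorch; the dropout probability is assumed 0.0 for illustration since the dump does not print it:

import torch
import torch.nn as nn

def window_reverse(windows, window_size=8, H=48, W=48):
    # windows: (num_windows*B, ws, ws, C) = (36, 8, 8, 192) here
    B = windows.shape[0] // ((H // window_size) * (W // window_size))
    x = windows.view(B, H // window_size, W // window_size, window_size, window_size, -1)
    return x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, H, W, -1)  # (1, 48, 48, 192)

mlp = nn.Sequential(nn.LayerNorm(192), nn.Linear(192, 384), nn.GELU(),
                    nn.Dropout(0.0), nn.Linear(384, 192), nn.Dropout(0.0))

w = torch.randn(36, 8, 8, 192)
print(window_reverse(w).shape)        # torch.Size([1, 48, 48, 192])
tokens = torch.randn(1, 48 * 48, 192)
out = tokens + mlp(tokens)            # the aten::add residual after each mlp.fc2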
prim::NumToTensor pnnx_10676 1 1 10849 N.107 aten::Int pnnx_10677 1 1 N.107 10851 aten::Int pnnx_10678 1 1 N.107 10852 aten::size pnnx_10679 2 1 x6.35 10831 10853 #x6.35=(36,64,192)f32 prim::NumToTensor pnnx_10680 1 1 10853 C.221 aten::Int pnnx_10681 1 1 C.221 10855 nn.Linear layers_dfe.2.residual_group.blocks.4.attn.qkv 1 1 x6.35 10856 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x6.35=(36,64,192)f32 #10856=(36,64,576)f32 aten::div pnnx_10682 3 1 C.221 10828 10827 10857 aten::Int pnnx_10683 1 1 10857 10858 prim::ListConstruct pnnx_10684 5 1 10848 10852 10832 10833 10858 10859 prim::Constant pnnx_10686 0 1 23356 value=2 prim::Constant pnnx_10687 0 1 23357 value=0 prim::Constant pnnx_10688 0 1 23358 value=3 prim::Constant pnnx_10689 0 1 23359 value=1 prim::ListConstruct pnnx_10690 5 1 23356 23357 23358 23359 10834 10861 Tensor.reshape Tensor.reshape_538 2 1 10856 10859 10860 $input=10856 $shape=10859 #10856=(36,64,576)f32 #10860=(36,64,3,6,32)f32 prim::Constant pnnx_10692 0 1 23360 value=0 prim::Constant pnnx_10693 0 1 23361 value=0 prim::Constant pnnx_10695 0 1 23362 value=0 prim::Constant pnnx_10696 0 1 23363 value=1 prim::Constant pnnx_10698 0 1 23364 value=0 prim::Constant pnnx_10699 0 1 23365 value=2 torch.permute torch.permute_2737 2 1 10860 10861 qkv1.35 $input=10860 $dims=10861 #10860=(36,64,3,6,32)f32 #qkv1.35=(3,36,6,64,32)f32 Tensor.select Tensor.select_806 3 1 qkv1.35 23360 23361 q.107 $input=qkv1.35 $dim=23360 $index=23361 #qkv1.35=(3,36,6,64,32)f32 #q.107=(36,6,64,32)f32 aten::mul pnnx_10701 2 1 q.107 10826 q1.35 #q.107=(36,6,64,32)f32 #q1.35=(36,6,64,32)f32 Tensor.select Tensor.select_807 3 1 qkv1.35 23362 23363 k.107 $input=qkv1.35 $dim=23362 $index=23363 #qkv1.35=(3,36,6,64,32)f32 #k.107=(36,6,64,32)f32 prim::Constant pnnx_10704 0 1 23366 value=-1 prim::ListConstruct pnnx_10705 1 1 23366 10869 Tensor.view Tensor.view_1559 2 1 relative_position_index.107 10869 10870 $input=relative_position_index.107 $shape=10869 #relative_position_index.107=(64,64)i64 #10870=(4096)i64 prim::ListConstruct pnnx_10707 1 1 10870 10871 #10870=(4096)i64 prim::Constant pnnx_10709 0 1 23367 value=64 prim::Constant pnnx_10710 0 1 23368 value=-1 prim::ListConstruct pnnx_10711 3 1 10837 23367 23368 10873 Tensor.index Tensor.index_378 2 1 relative_position_bias_table.107 10871 10872 $input=relative_position_bias_table.107 $expr=10871 #relative_position_bias_table.107=(225,6)f32 #10872=(4096,6)f32 prim::Constant pnnx_10713 0 1 23369 value=2 prim::Constant pnnx_10714 0 1 23370 value=0 prim::Constant pnnx_10715 0 1 23371 value=1 prim::ListConstruct pnnx_10716 3 1 23369 23370 23371 10875 Tensor.view Tensor.view_1560 2 1 10872 10873 relative_position_bias.107 $input=10872 $shape=10873 #10872=(4096,6)f32 #relative_position_bias.107=(64,64,6)f32 prim::Constant pnnx_10720 0 1 23373 value=0 torch.permute torch.permute_2738 2 1 relative_position_bias.107 10875 10876 $input=relative_position_bias.107 $dims=10875 #relative_position_bias.107=(64,64,6)f32 #10876=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_159 1 1 10876 relative_position_bias1.35 memory_format=torch.contiguous_format $input=10876 #10876=(6,64,64)f32 #relative_position_bias1.35=(6,64,64)f32 prim::Constant pnnx_10722 0 1 23374 value=1 torch.transpose torch.transpose_3083 3 1 k.107 10835 10836 10867 $input=k.107 $dim0=10835 $dim1=10836 #k.107=(36,6,64,32)f32 #10867=(36,6,32,64)f32 torch.matmul torch.matmul_2308 2 1 q1.35 10867 attn.215 $input=q1.35 $other=10867 #q1.35=(36,6,64,32)f32 #10867=(36,6,32,64)f32 
#attn.215=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3322 2 1 relative_position_bias1.35 23373 10878 $input=relative_position_bias1.35 $dim=23373 #relative_position_bias1.35=(6,64,64)f32 #10878=(1,6,64,64)f32 aten::add pnnx_10723 3 1 attn.215 10878 23374 input.239 #attn.215=(36,6,64,64)f32 #10878=(1,6,64,64)f32 #input.239=(36,6,64,64)f32 nn.Softmax layers_dfe.2.residual_group.blocks.4.attn.softmax 1 1 input.239 10880 dim=-1 #input.239=(36,6,64,64)f32 #10880=(36,6,64,64)f32 nn.Dropout layers_dfe.2.residual_group.blocks.4.attn.attn_drop 1 1 10880 10881 #10880=(36,6,64,64)f32 #10881=(36,6,64,64)f32 Tensor.select Tensor.select_808 3 1 qkv1.35 23364 23365 v.107 $input=qkv1.35 $dim=23364 $index=23365 #qkv1.35=(3,36,6,64,32)f32 #v.107=(36,6,64,32)f32 prim::Constant pnnx_10725 0 1 23375 value=1 prim::Constant pnnx_10726 0 1 23376 value=2 torch.matmul torch.matmul_2309 2 1 10881 v.107 10882 $input=10881 $other=v.107 #10881=(36,6,64,64)f32 #v.107=(36,6,64,32)f32 #10882=(36,6,64,32)f32 prim::ListConstruct pnnx_10728 3 1 10847 10851 10855 10884 torch.transpose torch.transpose_3084 3 1 10882 23375 23376 10883 $input=10882 $dim0=23375 $dim1=23376 #10882=(36,6,64,32)f32 #10883=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_539 2 1 10883 10884 input1.37 $input=10883 $shape=10884 #10883=(36,64,6,32)f32 #input1.37=(36,64,192)f32 nn.Linear layers_dfe.2.residual_group.blocks.4.attn.proj 1 1 input1.37 10886 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.37=(36,64,192)f32 #10886=(36,64,192)f32 nn.Dropout layers_dfe.2.residual_group.blocks.4.attn.proj_drop 1 1 10886 10887 #10886=(36,64,192)f32 #10887=(36,64,192)f32 prim::Constant pnnx_10730 0 1 23377 value=-1 prim::Constant pnnx_10731 0 1 23378 value=8 prim::Constant pnnx_10732 0 1 23379 value=8 prim::ListConstruct pnnx_10733 4 1 23377 23378 23379 10796 10888 prim::Constant pnnx_10735 0 1 23380 value=8 prim::Constant pnnx_10736 0 1 23381 value=trunc aten::div pnnx_10737 3 1 H0.1 23380 23381 10890 aten::Int pnnx_10738 1 1 10890 10891 prim::Constant pnnx_10739 0 1 23382 value=8 prim::Constant pnnx_10740 0 1 23383 value=trunc aten::div pnnx_10741 3 1 W0.1 23382 23383 10892 aten::Int pnnx_10742 1 1 10892 10893 prim::Constant pnnx_10743 0 1 23384 value=1 prim::Constant pnnx_10744 0 1 23385 value=8 prim::Constant pnnx_10745 0 1 23386 value=8 prim::Constant pnnx_10746 0 1 23387 value=-1 prim::ListConstruct pnnx_10747 6 1 23384 10891 10893 23385 23386 23387 10894 prim::Constant pnnx_10749 0 1 23388 value=0 prim::Constant pnnx_10750 0 1 23389 value=1 prim::Constant pnnx_10751 0 1 23390 value=3 prim::Constant pnnx_10752 0 1 23391 value=2 prim::Constant pnnx_10753 0 1 23392 value=4 prim::Constant pnnx_10754 0 1 23393 value=5 prim::ListConstruct pnnx_10755 6 1 23388 23389 23390 23391 23392 23393 10896 Tensor.view Tensor.view_1561 2 1 10887 10888 windows.107 $input=10887 $shape=10888 #10887=(36,64,192)f32 #windows.107=(36,8,8,192)f32 Tensor.view Tensor.view_1562 2 1 windows.107 10894 x7.35 $input=windows.107 $shape=10894 #windows.107=(36,8,8,192)f32 #x7.35=(1,6,6,8,8,192)f32 prim::Constant pnnx_10759 0 1 23395 value=1 prim::Constant pnnx_10760 0 1 23396 value=-1 prim::ListConstruct pnnx_10761 4 1 23395 977 1217 23396 10899 torch.permute torch.permute_2739 2 1 x7.35 10896 10897 $input=x7.35 $dims=10896 #x7.35=(1,6,6,8,8,192)f32 #10897=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_160 1 1 10897 10898 memory_format=torch.contiguous_format $input=10897 #10897=(1,6,8,6,8,192)f32 #10898=(1,6,8,6,8,192)f32 aten::mul pnnx_10763 2 
1 H0.1 W0.1 10901 aten::Int pnnx_10764 1 1 10901 10902 prim::ListConstruct pnnx_10765 3 1 10791 10902 10795 10903 prim::Constant pnnx_10767 0 1 10905 value=None prim::Constant pnnx_10768 0 1 23397 value=1 Tensor.view Tensor.view_1563 2 1 10898 10899 x8.35 $input=10898 $shape=10899 #10898=(1,6,8,6,8,192)f32 #x8.35=(1,48,48,192)f32 Tensor.view Tensor.view_1564 2 1 x8.35 10903 x9.35 $input=x8.35 $shape=10903 #x8.35=(1,48,48,192)f32 #x9.35=(1,2304,192)f32 aten::add pnnx_10769 3 1 10772 x9.35 23397 input.241 #10772=(1,2304,192)f32 #x9.35=(1,2304,192)f32 #input.241=(1,2304,192)f32 nn.LayerNorm layers_dfe.2.residual_group.blocks.4.norm2 1 1 input.241 10907 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.241=(1,2304,192)f32 #10907=(1,2304,192)f32 nn.Linear layers_dfe.2.residual_group.blocks.4.mlp.fc1 1 1 10907 10912 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #10907=(1,2304,192)f32 #10912=(1,2304,384)f32 nn.GELU layers_dfe.2.residual_group.blocks.4.mlp.act 1 1 10912 10913 #10912=(1,2304,384)f32 #10913=(1,2304,384)f32 nn.Dropout layers_dfe.2.residual_group.blocks.4.mlp.drop 1 1 10913 10914 #10913=(1,2304,384)f32 #10914=(1,2304,384)f32 nn.Linear layers_dfe.2.residual_group.blocks.4.mlp.fc2 1 1 10914 10915 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #10914=(1,2304,384)f32 #10915=(1,2304,192)f32 nn.Dropout layers_dfe.2.residual_group.blocks.4.mlp.drop 1 1 10915 10916 #10915=(1,2304,192)f32 #10916=(1,2304,192)f32 prim::Constant pnnx_10770 0 1 10917 value=None prim::Constant pnnx_10771 0 1 23398 value=1 aten::add pnnx_10772 3 1 input.241 10916 23398 10918 #input.241=(1,2304,192)f32 #10916=(1,2304,192)f32 #10918=(1,2304,192)f32 prim::Constant pnnx_10773 0 1 10919 value=trunc prim::Constant pnnx_10774 0 1 10920 value=8 prim::Constant pnnx_10775 0 1 10921 value=0 prim::Constant pnnx_10776 0 1 10922 value=2 prim::Constant pnnx_10777 0 1 10923 value=-4 prim::Constant pnnx_10778 0 1 10924 value=1 prim::Constant pnnx_10779 0 1 10925 value=3 prim::Constant pnnx_10780 0 1 10926 value=8 prim::Constant pnnx_10781 0 1 10927 value=4 prim::Constant pnnx_10782 0 1 10928 value=5 prim::Constant pnnx_10783 0 1 10929 value=-1 prim::Constant pnnx_10784 0 1 10930 value=64 pnnx.Attribute layers_dfe.2.residual_group.blocks.5 0 1 attn_mask.55 @attn_mask=(36,64,64)f32 #attn_mask.55=(36,64,64)f32 aten::size pnnx_10785 2 1 10918 10921 10937 #10918=(1,2304,192)f32 prim::NumToTensor pnnx_10786 1 1 10937 B.129 aten::Int pnnx_10787 1 1 B.129 10939 aten::Int pnnx_10788 1 1 B.129 10940 aten::size pnnx_10789 2 1 10918 10922 10941 #10918=(1,2304,192)f32 prim::NumToTensor pnnx_10790 1 1 10941 C.223 aten::Int pnnx_10791 1 1 C.223 10943 aten::Int pnnx_10792 1 1 C.223 10944 aten::Int pnnx_10793 1 1 C.223 10945 aten::Int pnnx_10794 1 1 C.223 10946 nn.LayerNorm layers_dfe.2.residual_group.blocks.5.norm1 1 1 10918 10947 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #10918=(1,2304,192)f32 #10947=(1,2304,192)f32 prim::ListConstruct pnnx_10795 4 1 10940 974 1214 10946 10948 prim::Constant pnnx_10797 0 1 23399 value=-4 prim::ListConstruct pnnx_10798 2 1 10923 23399 10950 prim::Constant pnnx_10799 0 1 23400 value=2 prim::ListConstruct pnnx_10800 2 1 10924 23400 10951 Tensor.view Tensor.view_1565 2 1 10947 10948 x.109 $input=10947 $shape=10948 #10947=(1,2304,192)f32 #x.109=(1,48,48,192)f32 prim::Constant pnnx_10802 0 1 23401 value=0 torch.roll torch.roll_2472 3 1 x.109 10950 
10951 x6.37 $input=x.109 $shifts=10950 $dims=10951 #x.109=(1,48,48,192)f32 #x6.37=(1,48,48,192)f32 aten::size pnnx_10803 2 1 x6.37 23401 10953 #x6.37=(1,48,48,192)f32 prim::NumToTensor pnnx_10804 1 1 10953 B1.37 aten::Int pnnx_10805 1 1 B1.37 10955 prim::Constant pnnx_10806 0 1 23402 value=1 aten::size pnnx_10807 2 1 x6.37 23402 10956 #x6.37=(1,48,48,192)f32 prim::NumToTensor pnnx_10808 1 1 10956 10957 prim::Constant pnnx_10809 0 1 23403 value=2 aten::size pnnx_10810 2 1 x6.37 23403 10958 #x6.37=(1,48,48,192)f32 prim::NumToTensor pnnx_10811 1 1 10958 10959 aten::size pnnx_10812 2 1 x6.37 10925 10960 #x6.37=(1,48,48,192)f32 prim::NumToTensor pnnx_10813 1 1 10960 C1.37 aten::Int pnnx_10814 1 1 C1.37 10962 aten::Int pnnx_10815 1 1 C1.37 10963 aten::div pnnx_10816 3 1 10957 10920 10919 10964 aten::Int pnnx_10817 1 1 10964 10965 prim::Constant pnnx_10818 0 1 23404 value=8 prim::Constant pnnx_10819 0 1 23405 value=trunc aten::div pnnx_10820 3 1 10959 23404 23405 10966 aten::Int pnnx_10821 1 1 10966 10967 prim::Constant pnnx_10822 0 1 23406 value=8 prim::ListConstruct pnnx_10823 6 1 10955 10965 10926 10967 23406 10963 10968 prim::Constant pnnx_10825 0 1 23407 value=0 prim::Constant pnnx_10826 0 1 23408 value=1 prim::Constant pnnx_10827 0 1 23409 value=3 prim::Constant pnnx_10828 0 1 23410 value=2 prim::ListConstruct pnnx_10829 6 1 23407 23408 23409 23410 10927 10928 10970 Tensor.view Tensor.view_1566 2 1 x6.37 10968 x7.37 $input=x6.37 $shape=10968 #x6.37=(1,48,48,192)f32 #x7.37=(1,6,8,6,8,192)f32 prim::Constant pnnx_10833 0 1 23412 value=8 prim::Constant pnnx_10834 0 1 23413 value=8 prim::ListConstruct pnnx_10835 4 1 10929 23412 23413 10962 10973 torch.permute torch.permute_2740 2 1 x7.37 10970 10971 $input=x7.37 $dims=10970 #x7.37=(1,6,8,6,8,192)f32 #10971=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_161 1 1 10971 10972 memory_format=torch.contiguous_format $input=10971 #10971=(1,6,6,8,8,192)f32 #10972=(1,6,6,8,8,192)f32 prim::Constant pnnx_10837 0 1 23414 value=-1 prim::ListConstruct pnnx_10838 3 1 23414 10930 10945 10975 prim::Constant pnnx_10840 0 1 10977 value=1.767767e-01 prim::Constant pnnx_10841 0 1 10978 value=trunc prim::Constant pnnx_10842 0 1 10979 value=6 prim::Constant pnnx_10843 0 1 10980 value=0 prim::Constant pnnx_10844 0 1 10981 value=1 prim::Constant pnnx_10845 0 1 10982 value=2 prim::Constant pnnx_10846 0 1 10983 value=3 prim::Constant pnnx_10847 0 1 10984 value=6 prim::Constant pnnx_10848 0 1 10985 value=4 prim::Constant pnnx_10849 0 1 10986 value=-2 prim::Constant pnnx_10850 0 1 10987 value=-1 prim::Constant pnnx_10851 0 1 10988 value=64 pnnx.Attribute layers_dfe.2.residual_group.blocks.5.attn 0 1 relative_position_bias_table.109 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.109=(225,6)f32 pnnx.Attribute layers_dfe.2.residual_group.blocks.5.attn 0 1 relative_position_index.109 @relative_position_index=(64,64)i64 #relative_position_index.109=(64,64)i64 Tensor.view Tensor.view_1567 2 1 10972 10973 x_windows.109 $input=10972 $shape=10973 #10972=(1,6,6,8,8,192)f32 #x_windows.109=(36,8,8,192)f32 Tensor.view Tensor.view_1568 2 1 x_windows.109 10975 x8.37 $input=x_windows.109 $shape=10975 #x_windows.109=(36,8,8,192)f32 #x8.37=(36,64,192)f32 aten::size pnnx_10852 2 1 x8.37 10980 10996 #x8.37=(36,64,192)f32 prim::NumToTensor pnnx_10853 1 1 10996 B_.109 aten::Int pnnx_10854 1 1 B_.109 10998 aten::Int pnnx_10855 1 1 B_.109 10999 aten::size pnnx_10856 2 1 x8.37 10981 11000 #x8.37=(36,64,192)f32 prim::NumToTensor pnnx_10857 1 1 11000 N.109 aten::Int 
pnnx_10858 1 1 N.109 11002 aten::Int pnnx_10859 1 1 N.109 11003 aten::Int pnnx_10860 1 1 N.109 11004 aten::Int pnnx_10861 1 1 N.109 11005 aten::Int pnnx_10862 1 1 N.109 11006 aten::Int pnnx_10863 1 1 N.109 11007 aten::size pnnx_10864 2 1 x8.37 10982 11008 #x8.37=(36,64,192)f32 prim::NumToTensor pnnx_10865 1 1 11008 C.225 aten::Int pnnx_10866 1 1 C.225 11010 nn.Linear layers_dfe.2.residual_group.blocks.5.attn.qkv 1 1 x8.37 11011 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x8.37=(36,64,192)f32 #11011=(36,64,576)f32 aten::div pnnx_10867 3 1 C.225 10979 10978 11012 aten::Int pnnx_10868 1 1 11012 11013 prim::ListConstruct pnnx_10869 5 1 10999 11007 10983 10984 11013 11014 prim::Constant pnnx_10871 0 1 23415 value=2 prim::Constant pnnx_10872 0 1 23416 value=0 prim::Constant pnnx_10873 0 1 23417 value=3 prim::Constant pnnx_10874 0 1 23418 value=1 prim::ListConstruct pnnx_10875 5 1 23415 23416 23417 23418 10985 11016 Tensor.reshape Tensor.reshape_540 2 1 11011 11014 11015 $input=11011 $shape=11014 #11011=(36,64,576)f32 #11015=(36,64,3,6,32)f32 prim::Constant pnnx_10877 0 1 23419 value=0 prim::Constant pnnx_10878 0 1 23420 value=0 prim::Constant pnnx_10880 0 1 23421 value=0 prim::Constant pnnx_10881 0 1 23422 value=1 prim::Constant pnnx_10883 0 1 23423 value=0 prim::Constant pnnx_10884 0 1 23424 value=2 torch.permute torch.permute_2741 2 1 11015 11016 qkv1.37 $input=11015 $dims=11016 #11015=(36,64,3,6,32)f32 #qkv1.37=(3,36,6,64,32)f32 Tensor.select Tensor.select_809 3 1 qkv1.37 23419 23420 q.109 $input=qkv1.37 $dim=23419 $index=23420 #qkv1.37=(3,36,6,64,32)f32 #q.109=(36,6,64,32)f32 aten::mul pnnx_10886 2 1 q.109 10977 q1.37 #q.109=(36,6,64,32)f32 #q1.37=(36,6,64,32)f32 Tensor.select Tensor.select_810 3 1 qkv1.37 23421 23422 k.109 $input=qkv1.37 $dim=23421 $index=23422 #qkv1.37=(3,36,6,64,32)f32 #k.109=(36,6,64,32)f32 prim::Constant pnnx_10889 0 1 23425 value=-1 prim::ListConstruct pnnx_10890 1 1 23425 11024 Tensor.view Tensor.view_1569 2 1 relative_position_index.109 11024 11025 $input=relative_position_index.109 $shape=11024 #relative_position_index.109=(64,64)i64 #11025=(4096)i64 prim::ListConstruct pnnx_10892 1 1 11025 11026 #11025=(4096)i64 prim::Constant pnnx_10894 0 1 23426 value=64 prim::Constant pnnx_10895 0 1 23427 value=-1 prim::ListConstruct pnnx_10896 3 1 10988 23426 23427 11028 Tensor.index Tensor.index_379 2 1 relative_position_bias_table.109 11026 11027 $input=relative_position_bias_table.109 $expr=11026 #relative_position_bias_table.109=(225,6)f32 #11027=(4096,6)f32 prim::Constant pnnx_10898 0 1 23428 value=2 prim::Constant pnnx_10899 0 1 23429 value=0 prim::Constant pnnx_10900 0 1 23430 value=1 prim::ListConstruct pnnx_10901 3 1 23428 23429 23430 11030 Tensor.view Tensor.view_1570 2 1 11027 11028 relative_position_bias.109 $input=11027 $shape=11028 #11027=(4096,6)f32 #relative_position_bias.109=(64,64,6)f32 prim::Constant pnnx_10905 0 1 23432 value=0 torch.permute torch.permute_2742 2 1 relative_position_bias.109 11030 11031 $input=relative_position_bias.109 $dims=11030 #relative_position_bias.109=(64,64,6)f32 #11031=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_162 1 1 11031 relative_position_bias1.37 memory_format=torch.contiguous_format $input=11031 #11031=(6,64,64)f32 #relative_position_bias1.37=(6,64,64)f32 prim::Constant pnnx_10907 0 1 23433 value=1 torch.transpose torch.transpose_3085 3 1 k.109 10986 10987 11022 $input=k.109 $dim0=10986 $dim1=10987 #k.109=(36,6,64,32)f32 #11022=(36,6,32,64)f32 torch.matmul torch.matmul_2310 2 1 q1.37 
11022 attn.219 $input=q1.37 $other=11022 #q1.37=(36,6,64,32)f32 #11022=(36,6,32,64)f32 #attn.219=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3323 2 1 relative_position_bias1.37 23432 11033 $input=relative_position_bias1.37 $dim=23432 #relative_position_bias1.37=(6,64,64)f32 #11033=(1,6,64,64)f32 aten::add pnnx_10908 3 1 attn.219 11033 23433 attn2.19 #attn.219=(36,6,64,64)f32 #11033=(1,6,64,64)f32 #attn2.19=(36,6,64,64)f32 prim::Constant pnnx_10909 0 1 23434 value=0 aten::size pnnx_10910 2 1 attn_mask.55 23434 11035 #attn_mask.55=(36,64,64)f32 prim::NumToTensor pnnx_10911 1 1 11035 other.55 aten::Int pnnx_10912 1 1 other.55 11037 prim::Constant pnnx_10913 0 1 23435 value=trunc aten::div pnnx_10914 3 1 B_.109 other.55 23435 11038 aten::Int pnnx_10915 1 1 11038 11039 prim::Constant pnnx_10916 0 1 23436 value=6 prim::ListConstruct pnnx_10917 5 1 11039 11037 23436 11006 11005 11040 prim::Constant pnnx_10919 0 1 23437 value=1 prim::Constant pnnx_10921 0 1 23438 value=0 prim::Constant pnnx_10923 0 1 23439 value=1 Tensor.view Tensor.view_1571 2 1 attn2.19 11040 11041 $input=attn2.19 $shape=11040 #attn2.19=(36,6,64,64)f32 #11041=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3324 2 1 attn_mask.55 23437 11042 $input=attn_mask.55 $dim=23437 #attn_mask.55=(36,64,64)f32 #11042=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3325 2 1 11042 23438 11043 $input=11042 $dim=23438 #11042=(36,1,64,64)f32 #11043=(1,36,1,64,64)f32 aten::add pnnx_10924 3 1 11041 11043 23439 attn3.19 #11041=(1,36,6,64,64)f32 #11043=(1,36,1,64,64)f32 #attn3.19=(1,36,6,64,64)f32 prim::Constant pnnx_10925 0 1 23440 value=-1 prim::Constant pnnx_10926 0 1 23441 value=6 prim::ListConstruct pnnx_10927 4 1 23440 23441 11004 11003 11045 Tensor.view Tensor.view_1572 2 1 attn3.19 11045 input.243 $input=attn3.19 $shape=11045 #attn3.19=(1,36,6,64,64)f32 #input.243=(36,6,64,64)f32 nn.Softmax layers_dfe.2.residual_group.blocks.5.attn.softmax 1 1 input.243 11047 dim=-1 #input.243=(36,6,64,64)f32 #11047=(36,6,64,64)f32 nn.Dropout layers_dfe.2.residual_group.blocks.5.attn.attn_drop 1 1 11047 11048 #11047=(36,6,64,64)f32 #11048=(36,6,64,64)f32 Tensor.select Tensor.select_811 3 1 qkv1.37 23423 23424 v.109 $input=qkv1.37 $dim=23423 $index=23424 #qkv1.37=(3,36,6,64,32)f32 #v.109=(36,6,64,32)f32 prim::Constant pnnx_10930 0 1 23442 value=1 prim::Constant pnnx_10931 0 1 23443 value=2 torch.matmul torch.matmul_2311 2 1 11048 v.109 11049 $input=11048 $other=v.109 #11048=(36,6,64,64)f32 #v.109=(36,6,64,32)f32 #11049=(36,6,64,32)f32 prim::ListConstruct pnnx_10933 3 1 10998 11002 11010 11051 torch.transpose torch.transpose_3086 3 1 11049 23442 23443 11050 $input=11049 $dim0=23442 $dim1=23443 #11049=(36,6,64,32)f32 #11050=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_541 2 1 11050 11051 input1.39 $input=11050 $shape=11051 #11050=(36,64,6,32)f32 #input1.39=(36,64,192)f32 nn.Linear layers_dfe.2.residual_group.blocks.5.attn.proj 1 1 input1.39 11053 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.39=(36,64,192)f32 #11053=(36,64,192)f32 nn.Dropout layers_dfe.2.residual_group.blocks.5.attn.proj_drop 1 1 11053 11054 #11053=(36,64,192)f32 #11054=(36,64,192)f32 prim::Constant pnnx_10935 0 1 23444 value=-1 prim::Constant pnnx_10936 0 1 23445 value=8 prim::Constant pnnx_10937 0 1 23446 value=8 prim::ListConstruct pnnx_10938 4 1 23444 23445 23446 10944 11055 prim::Constant pnnx_10940 0 1 23447 value=8 prim::Constant pnnx_10941 0 1 23448 value=trunc aten::div pnnx_10942 3 1 H0.1 23447 23448 11057 aten::Int pnnx_10943 1 1 11057 
11058 prim::Constant pnnx_10944 0 1 23449 value=8 prim::Constant pnnx_10945 0 1 23450 value=trunc aten::div pnnx_10946 3 1 W0.1 23449 23450 11059 aten::Int pnnx_10947 1 1 11059 11060 prim::Constant pnnx_10948 0 1 23451 value=1 prim::Constant pnnx_10949 0 1 23452 value=8 prim::Constant pnnx_10950 0 1 23453 value=8 prim::Constant pnnx_10951 0 1 23454 value=-1 prim::ListConstruct pnnx_10952 6 1 23451 11058 11060 23452 23453 23454 11061 prim::Constant pnnx_10954 0 1 23455 value=0 prim::Constant pnnx_10955 0 1 23456 value=1 prim::Constant pnnx_10956 0 1 23457 value=3 prim::Constant pnnx_10957 0 1 23458 value=2 prim::Constant pnnx_10958 0 1 23459 value=4 prim::Constant pnnx_10959 0 1 23460 value=5 prim::ListConstruct pnnx_10960 6 1 23455 23456 23457 23458 23459 23460 11063 Tensor.view Tensor.view_1573 2 1 11054 11055 windows.109 $input=11054 $shape=11055 #11054=(36,64,192)f32 #windows.109=(36,8,8,192)f32 Tensor.view Tensor.view_1574 2 1 windows.109 11061 x9.37 $input=windows.109 $shape=11061 #windows.109=(36,8,8,192)f32 #x9.37=(1,6,6,8,8,192)f32 prim::Constant pnnx_10964 0 1 23462 value=1 prim::Constant pnnx_10965 0 1 23463 value=-1 prim::ListConstruct pnnx_10966 4 1 23462 971 1211 23463 11066 torch.permute torch.permute_2743 2 1 x9.37 11063 11064 $input=x9.37 $dims=11063 #x9.37=(1,6,6,8,8,192)f32 #11064=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_163 1 1 11064 11065 memory_format=torch.contiguous_format $input=11064 #11064=(1,6,8,6,8,192)f32 #11065=(1,6,8,6,8,192)f32 prim::Constant pnnx_10968 0 1 23464 value=4 prim::Constant pnnx_10969 0 1 23465 value=4 prim::ListConstruct pnnx_10970 2 1 23464 23465 11068 prim::Constant pnnx_10971 0 1 23466 value=1 prim::Constant pnnx_10972 0 1 23467 value=2 prim::ListConstruct pnnx_10973 2 1 23466 23467 11069 Tensor.view Tensor.view_1575 2 1 11065 11066 shifted_x.55 $input=11065 $shape=11066 #11065=(1,6,8,6,8,192)f32 #shifted_x.55=(1,48,48,192)f32 aten::mul pnnx_10975 2 1 H0.1 W0.1 11071 aten::Int pnnx_10976 1 1 11071 11072 prim::ListConstruct pnnx_10977 3 1 10939 11072 10943 11073 prim::Constant pnnx_10979 0 1 11075 value=None prim::Constant pnnx_10980 0 1 23468 value=1 torch.roll torch.roll_2473 3 1 shifted_x.55 11068 11069 x10.19 $input=shifted_x.55 $shifts=11068 $dims=11069 #shifted_x.55=(1,48,48,192)f32 #x10.19=(1,48,48,192)f32 Tensor.view Tensor.view_1576 2 1 x10.19 11073 x11.19 $input=x10.19 $shape=11073 #x10.19=(1,48,48,192)f32 #x11.19=(1,2304,192)f32 aten::add pnnx_10981 3 1 10918 x11.19 23468 input.245 #10918=(1,2304,192)f32 #x11.19=(1,2304,192)f32 #input.245=(1,2304,192)f32 nn.LayerNorm layers_dfe.2.residual_group.blocks.5.norm2 1 1 input.245 11077 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.245=(1,2304,192)f32 #11077=(1,2304,192)f32 nn.Linear layers_dfe.2.residual_group.blocks.5.mlp.fc1 1 1 11077 11082 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #11077=(1,2304,192)f32 #11082=(1,2304,384)f32 nn.GELU layers_dfe.2.residual_group.blocks.5.mlp.act 1 1 11082 11083 #11082=(1,2304,384)f32 #11083=(1,2304,384)f32 nn.Dropout layers_dfe.2.residual_group.blocks.5.mlp.drop 1 1 11083 11084 #11083=(1,2304,384)f32 #11084=(1,2304,384)f32 nn.Linear layers_dfe.2.residual_group.blocks.5.mlp.fc2 1 1 11084 11085 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #11084=(1,2304,384)f32 #11085=(1,2304,192)f32 nn.Dropout layers_dfe.2.residual_group.blocks.5.mlp.drop 1 1 11085 11086 #11085=(1,2304,192)f32 #11086=(1,2304,192)f32 prim::Constant 
pnnx_10982 0 1 11087 value=None prim::Constant pnnx_10983 0 1 23469 value=1 aten::add pnnx_10984 3 1 input.245 11086 23469 11088 #input.245=(1,2304,192)f32 #11086=(1,2304,192)f32 #11088=(1,2304,192)f32 prim::Constant pnnx_10985 0 1 11089 value=0 prim::Constant pnnx_10986 0 1 11090 value=1 prim::Constant pnnx_10987 0 1 11091 value=2 prim::Constant pnnx_10988 0 1 11092 value=192 aten::size pnnx_10989 2 1 11088 11089 11093 #11088=(1,2304,192)f32 prim::NumToTensor pnnx_10990 1 1 11093 B.131 aten::Int pnnx_10991 1 1 B.131 11095 prim::ListConstruct pnnx_10993 4 1 11095 11092 968 1208 11097 torch.transpose torch.transpose_3087 3 1 11088 11090 11091 11096 $input=11088 $dim0=11090 $dim1=11091 #11088=(1,2304,192)f32 #11096=(1,192,2304)f32 Tensor.view Tensor.view_1577 2 1 11096 11097 input.247 $input=11096 $shape=11097 #11096=(1,192,2304)f32 #input.247=(1,192,48,48)f32 nn.Conv2d layers_dfe.2.conv 1 1 input.247 11099 bias=True dilation=(1,1) groups=1 in_channels=192 kernel_size=(3,3) out_channels=192 padding=(1,1) padding_mode=zeros stride=(1,1) @bias=(192)f32 @weight=(192,192,3,3)f32 #input.247=(1,192,48,48)f32 #11099=(1,192,48,48)f32 prim::Constant pnnx_10995 0 1 11100 value=-1 prim::Constant pnnx_10996 0 1 11101 value=2 prim::Constant pnnx_10997 0 1 11102 value=1 prim::Constant pnnx_10999 0 1 23470 value=2 torch.flatten torch.flatten_2193 3 1 11099 11101 11100 11103 $input=11099 $start_dim=11101 $end_dim=11100 #11099=(1,192,48,48)f32 #11103=(1,192,2304)f32 torch.transpose torch.transpose_3088 3 1 11103 11102 23470 11104 $input=11103 $dim0=11102 $dim1=23470 #11103=(1,192,2304)f32 #11104=(1,2304,192)f32 aten::add pnnx_11001 3 1 11104 10123 10124 11105 #11104=(1,2304,192)f32 #10123=(1,2304,192)f32 #11105=(1,2304,192)f32 prim::Constant pnnx_11002 0 1 11106 value=1 prim::Constant pnnx_11003 0 1 11123 value=trunc prim::Constant pnnx_11004 0 1 11124 value=8 prim::Constant pnnx_11005 0 1 11125 value=0 prim::Constant pnnx_11006 0 1 11126 value=2 prim::Constant pnnx_11007 0 1 11127 value=1 prim::Constant pnnx_11008 0 1 11128 value=3 prim::Constant pnnx_11009 0 1 11129 value=8 prim::Constant pnnx_11010 0 1 11130 value=4 prim::Constant pnnx_11011 0 1 11131 value=5 prim::Constant pnnx_11012 0 1 11132 value=-1 prim::Constant pnnx_11013 0 1 11133 value=64 aten::size pnnx_11014 2 1 11105 11125 11139 #11105=(1,2304,192)f32 prim::NumToTensor pnnx_11015 1 1 11139 B.133 aten::Int pnnx_11016 1 1 B.133 11141 aten::Int pnnx_11017 1 1 B.133 11142 aten::size pnnx_11018 2 1 11105 11126 11143 #11105=(1,2304,192)f32 prim::NumToTensor pnnx_11019 1 1 11143 C.227 aten::Int pnnx_11020 1 1 C.227 11145 aten::Int pnnx_11021 1 1 C.227 11146 aten::Int pnnx_11022 1 1 C.227 11147 aten::Int pnnx_11023 1 1 C.227 11148 nn.LayerNorm layers_dfe.3.residual_group.blocks.0.norm1 1 1 11105 11149 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #11105=(1,2304,192)f32 #11149=(1,2304,192)f32 prim::ListConstruct pnnx_11024 4 1 11142 965 1205 11148 11150 prim::Constant pnnx_11026 0 1 23471 value=0 Tensor.view Tensor.view_1578 2 1 11149 11150 x.111 $input=11149 $shape=11150 #11149=(1,2304,192)f32 #x.111=(1,48,48,192)f32 aten::size pnnx_11027 2 1 x.111 23471 11152 #x.111=(1,48,48,192)f32 prim::NumToTensor pnnx_11028 1 1 11152 B1.39 aten::Int pnnx_11029 1 1 B1.39 11154 aten::size pnnx_11030 2 1 x.111 11127 11155 #x.111=(1,48,48,192)f32 prim::NumToTensor pnnx_11031 1 1 11155 11156 prim::Constant pnnx_11032 0 1 23472 value=2 aten::size pnnx_11033 2 1 x.111 23472 11157 #x.111=(1,48,48,192)f32 prim::NumToTensor 
pnnx_11034 1 1 11157 11158 aten::size pnnx_11035 2 1 x.111 11128 11159 #x.111=(1,48,48,192)f32 prim::NumToTensor pnnx_11036 1 1 11159 C1.39 aten::Int pnnx_11037 1 1 C1.39 11161 aten::Int pnnx_11038 1 1 C1.39 11162 aten::div pnnx_11039 3 1 11156 11124 11123 11163 aten::Int pnnx_11040 1 1 11163 11164 prim::Constant pnnx_11041 0 1 23473 value=8 prim::Constant pnnx_11042 0 1 23474 value=trunc aten::div pnnx_11043 3 1 11158 23473 23474 11165 aten::Int pnnx_11044 1 1 11165 11166 prim::Constant pnnx_11045 0 1 23475 value=8 prim::ListConstruct pnnx_11046 6 1 11154 11164 11129 11166 23475 11162 11167 prim::Constant pnnx_11048 0 1 23476 value=0 prim::Constant pnnx_11049 0 1 23477 value=1 prim::Constant pnnx_11050 0 1 23478 value=3 prim::Constant pnnx_11051 0 1 23479 value=2 prim::ListConstruct pnnx_11052 6 1 23476 23477 23478 23479 11130 11131 11169 Tensor.view Tensor.view_1579 2 1 x.111 11167 x5.57 $input=x.111 $shape=11167 #x.111=(1,48,48,192)f32 #x5.57=(1,6,8,6,8,192)f32 prim::Constant pnnx_11056 0 1 23481 value=8 prim::Constant pnnx_11057 0 1 23482 value=8 prim::ListConstruct pnnx_11058 4 1 11132 23481 23482 11161 11172 torch.permute torch.permute_2744 2 1 x5.57 11169 11170 $input=x5.57 $dims=11169 #x5.57=(1,6,8,6,8,192)f32 #11170=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_164 1 1 11170 11171 memory_format=torch.contiguous_format $input=11170 #11170=(1,6,6,8,8,192)f32 #11171=(1,6,6,8,8,192)f32 prim::Constant pnnx_11060 0 1 23483 value=-1 prim::ListConstruct pnnx_11061 3 1 23483 11133 11147 11174 prim::Constant pnnx_11063 0 1 11176 value=1.767767e-01 prim::Constant pnnx_11064 0 1 11177 value=trunc prim::Constant pnnx_11065 0 1 11178 value=6 prim::Constant pnnx_11066 0 1 11179 value=0 prim::Constant pnnx_11067 0 1 11180 value=1 prim::Constant pnnx_11068 0 1 11181 value=2 prim::Constant pnnx_11069 0 1 11182 value=3 prim::Constant pnnx_11070 0 1 11183 value=6 prim::Constant pnnx_11071 0 1 11184 value=4 prim::Constant pnnx_11072 0 1 11185 value=-2 prim::Constant pnnx_11073 0 1 11186 value=-1 prim::Constant pnnx_11074 0 1 11187 value=64 pnnx.Attribute layers_dfe.3.residual_group.blocks.0.attn 0 1 relative_position_bias_table.111 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.111=(225,6)f32 pnnx.Attribute layers_dfe.3.residual_group.blocks.0.attn 0 1 relative_position_index.111 @relative_position_index=(64,64)i64 #relative_position_index.111=(64,64)i64 Tensor.view Tensor.view_1580 2 1 11171 11172 x_windows.111 $input=11171 $shape=11172 #11171=(1,6,6,8,8,192)f32 #x_windows.111=(36,8,8,192)f32 Tensor.view Tensor.view_1581 2 1 x_windows.111 11174 x6.39 $input=x_windows.111 $shape=11174 #x_windows.111=(36,8,8,192)f32 #x6.39=(36,64,192)f32 aten::size pnnx_11075 2 1 x6.39 11179 11195 #x6.39=(36,64,192)f32 prim::NumToTensor pnnx_11076 1 1 11195 B_.111 aten::Int pnnx_11077 1 1 B_.111 11197 aten::Int pnnx_11078 1 1 B_.111 11198 aten::size pnnx_11079 2 1 x6.39 11180 11199 #x6.39=(36,64,192)f32 prim::NumToTensor pnnx_11080 1 1 11199 N.111 aten::Int pnnx_11081 1 1 N.111 11201 aten::Int pnnx_11082 1 1 N.111 11202 aten::size pnnx_11083 2 1 x6.39 11181 11203 #x6.39=(36,64,192)f32 prim::NumToTensor pnnx_11084 1 1 11203 C.229 aten::Int pnnx_11085 1 1 C.229 11205 nn.Linear layers_dfe.3.residual_group.blocks.0.attn.qkv 1 1 x6.39 11206 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x6.39=(36,64,192)f32 #11206=(36,64,576)f32 aten::div pnnx_11086 3 1 C.229 11178 11177 11207 aten::Int pnnx_11087 1 1 11207 11208 prim::ListConstruct pnnx_11088 5 1 11198 11202 
11182 11183 11208 11209 prim::Constant pnnx_11090 0 1 23484 value=2 prim::Constant pnnx_11091 0 1 23485 value=0 prim::Constant pnnx_11092 0 1 23486 value=3 prim::Constant pnnx_11093 0 1 23487 value=1 prim::ListConstruct pnnx_11094 5 1 23484 23485 23486 23487 11184 11211 Tensor.reshape Tensor.reshape_542 2 1 11206 11209 11210 $input=11206 $shape=11209 #11206=(36,64,576)f32 #11210=(36,64,3,6,32)f32 prim::Constant pnnx_11096 0 1 23488 value=0 prim::Constant pnnx_11097 0 1 23489 value=0 prim::Constant pnnx_11099 0 1 23490 value=0 prim::Constant pnnx_11100 0 1 23491 value=1 prim::Constant pnnx_11102 0 1 23492 value=0 prim::Constant pnnx_11103 0 1 23493 value=2 torch.permute torch.permute_2745 2 1 11210 11211 qkv1.39 $input=11210 $dims=11211 #11210=(36,64,3,6,32)f32 #qkv1.39=(3,36,6,64,32)f32 Tensor.select Tensor.select_812 3 1 qkv1.39 23488 23489 q.111 $input=qkv1.39 $dim=23488 $index=23489 #qkv1.39=(3,36,6,64,32)f32 #q.111=(36,6,64,32)f32 aten::mul pnnx_11105 2 1 q.111 11176 q1.39 #q.111=(36,6,64,32)f32 #q1.39=(36,6,64,32)f32 Tensor.select Tensor.select_813 3 1 qkv1.39 23490 23491 k.111 $input=qkv1.39 $dim=23490 $index=23491 #qkv1.39=(3,36,6,64,32)f32 #k.111=(36,6,64,32)f32 prim::Constant pnnx_11108 0 1 23494 value=-1 prim::ListConstruct pnnx_11109 1 1 23494 11219 Tensor.view Tensor.view_1582 2 1 relative_position_index.111 11219 11220 $input=relative_position_index.111 $shape=11219 #relative_position_index.111=(64,64)i64 #11220=(4096)i64 prim::ListConstruct pnnx_11111 1 1 11220 11221 #11220=(4096)i64 prim::Constant pnnx_11113 0 1 23495 value=64 prim::Constant pnnx_11114 0 1 23496 value=-1 prim::ListConstruct pnnx_11115 3 1 11187 23495 23496 11223 Tensor.index Tensor.index_380 2 1 relative_position_bias_table.111 11221 11222 $input=relative_position_bias_table.111 $expr=11221 #relative_position_bias_table.111=(225,6)f32 #11222=(4096,6)f32 prim::Constant pnnx_11117 0 1 23497 value=2 prim::Constant pnnx_11118 0 1 23498 value=0 prim::Constant pnnx_11119 0 1 23499 value=1 prim::ListConstruct pnnx_11120 3 1 23497 23498 23499 11225 Tensor.view Tensor.view_1583 2 1 11222 11223 relative_position_bias.111 $input=11222 $shape=11223 #11222=(4096,6)f32 #relative_position_bias.111=(64,64,6)f32 prim::Constant pnnx_11124 0 1 23501 value=0 torch.permute torch.permute_2746 2 1 relative_position_bias.111 11225 11226 $input=relative_position_bias.111 $dims=11225 #relative_position_bias.111=(64,64,6)f32 #11226=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_165 1 1 11226 relative_position_bias1.39 memory_format=torch.contiguous_format $input=11226 #11226=(6,64,64)f32 #relative_position_bias1.39=(6,64,64)f32 prim::Constant pnnx_11126 0 1 23502 value=1 torch.transpose torch.transpose_3089 3 1 k.111 11185 11186 11217 $input=k.111 $dim0=11185 $dim1=11186 #k.111=(36,6,64,32)f32 #11217=(36,6,32,64)f32 torch.matmul torch.matmul_2312 2 1 q1.39 11217 attn.223 $input=q1.39 $other=11217 #q1.39=(36,6,64,32)f32 #11217=(36,6,32,64)f32 #attn.223=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3326 2 1 relative_position_bias1.39 23501 11228 $input=relative_position_bias1.39 $dim=23501 #relative_position_bias1.39=(6,64,64)f32 #11228=(1,6,64,64)f32 aten::add pnnx_11127 3 1 attn.223 11228 23502 input.249 #attn.223=(36,6,64,64)f32 #11228=(1,6,64,64)f32 #input.249=(36,6,64,64)f32 nn.Softmax layers_dfe.3.residual_group.blocks.0.attn.softmax 1 1 input.249 11230 dim=-1 #input.249=(36,6,64,64)f32 #11230=(36,6,64,64)f32 nn.Dropout layers_dfe.3.residual_group.blocks.0.attn.attn_drop 1 1 11230 11231 #11230=(36,6,64,64)f32 
#11231=(36,6,64,64)f32 Tensor.select Tensor.select_814 3 1 qkv1.39 23492 23493 v.111 $input=qkv1.39 $dim=23492 $index=23493 #qkv1.39=(3,36,6,64,32)f32 #v.111=(36,6,64,32)f32 prim::Constant pnnx_11129 0 1 23503 value=1 prim::Constant pnnx_11130 0 1 23504 value=2 torch.matmul torch.matmul_2313 2 1 11231 v.111 11232 $input=11231 $other=v.111 #11231=(36,6,64,64)f32 #v.111=(36,6,64,32)f32 #11232=(36,6,64,32)f32 prim::ListConstruct pnnx_11132 3 1 11197 11201 11205 11234 torch.transpose torch.transpose_3090 3 1 11232 23503 23504 11233 $input=11232 $dim0=23503 $dim1=23504 #11232=(36,6,64,32)f32 #11233=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_543 2 1 11233 11234 input1.41 $input=11233 $shape=11234 #11233=(36,64,6,32)f32 #input1.41=(36,64,192)f32 nn.Linear layers_dfe.3.residual_group.blocks.0.attn.proj 1 1 input1.41 11236 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.41=(36,64,192)f32 #11236=(36,64,192)f32 nn.Dropout layers_dfe.3.residual_group.blocks.0.attn.proj_drop 1 1 11236 11237 #11236=(36,64,192)f32 #11237=(36,64,192)f32 prim::Constant pnnx_11134 0 1 23505 value=-1 prim::Constant pnnx_11135 0 1 23506 value=8 prim::Constant pnnx_11136 0 1 23507 value=8 prim::ListConstruct pnnx_11137 4 1 23505 23506 23507 11146 11238 prim::Constant pnnx_11139 0 1 23508 value=8 prim::Constant pnnx_11140 0 1 23509 value=trunc aten::div pnnx_11141 3 1 H0.1 23508 23509 11240 aten::Int pnnx_11142 1 1 11240 11241 prim::Constant pnnx_11143 0 1 23510 value=8 prim::Constant pnnx_11144 0 1 23511 value=trunc aten::div pnnx_11145 3 1 W0.1 23510 23511 11242 aten::Int pnnx_11146 1 1 11242 11243 prim::Constant pnnx_11147 0 1 23512 value=1 prim::Constant pnnx_11148 0 1 23513 value=8 prim::Constant pnnx_11149 0 1 23514 value=8 prim::Constant pnnx_11150 0 1 23515 value=-1 prim::ListConstruct pnnx_11151 6 1 23512 11241 11243 23513 23514 23515 11244 prim::Constant pnnx_11153 0 1 23516 value=0 prim::Constant pnnx_11154 0 1 23517 value=1 prim::Constant pnnx_11155 0 1 23518 value=3 prim::Constant pnnx_11156 0 1 23519 value=2 prim::Constant pnnx_11157 0 1 23520 value=4 prim::Constant pnnx_11158 0 1 23521 value=5 prim::ListConstruct pnnx_11159 6 1 23516 23517 23518 23519 23520 23521 11246 Tensor.view Tensor.view_1584 2 1 11237 11238 windows.111 $input=11237 $shape=11238 #11237=(36,64,192)f32 #windows.111=(36,8,8,192)f32 Tensor.view Tensor.view_1585 2 1 windows.111 11244 x7.39 $input=windows.111 $shape=11244 #windows.111=(36,8,8,192)f32 #x7.39=(1,6,6,8,8,192)f32 prim::Constant pnnx_11163 0 1 23523 value=1 prim::Constant pnnx_11164 0 1 23524 value=-1 prim::ListConstruct pnnx_11165 4 1 23523 962 1202 23524 11249 torch.permute torch.permute_2747 2 1 x7.39 11246 11247 $input=x7.39 $dims=11246 #x7.39=(1,6,6,8,8,192)f32 #11247=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_166 1 1 11247 11248 memory_format=torch.contiguous_format $input=11247 #11247=(1,6,8,6,8,192)f32 #11248=(1,6,8,6,8,192)f32 aten::mul pnnx_11167 2 1 H0.1 W0.1 11251 aten::Int pnnx_11168 1 1 11251 11252 prim::ListConstruct pnnx_11169 3 1 11141 11252 11145 11253 prim::Constant pnnx_11171 0 1 11255 value=None prim::Constant pnnx_11172 0 1 23525 value=1 Tensor.view Tensor.view_1586 2 1 11248 11249 x8.39 $input=11248 $shape=11249 #11248=(1,6,8,6,8,192)f32 #x8.39=(1,48,48,192)f32 Tensor.view Tensor.view_1587 2 1 x8.39 11253 x9.39 $input=x8.39 $shape=11253 #x8.39=(1,48,48,192)f32 #x9.39=(1,2304,192)f32 aten::add pnnx_11173 3 1 11105 x9.39 23525 input.251 #11105=(1,2304,192)f32 #x9.39=(1,2304,192)f32 #input.251=(1,2304,192)f32 
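Each *.attn subgraph above (most recently layers_dfe.3.residual_group.blocks.0.attn) repeats the same pattern: a 192->576 qkv projection reshaped to (3, 36, 6, 64, 32), q scaled by 1.767767e-01 = 32**-0.5, q @ k^T, a relative position bias gathered from the (225,6) table with the (64,64) index attribute, softmax, attn @ v, and a 192->192 output projection. A hedged PyTorch sketch of that per-window attention, with weights passed in explicitly; the argument names are illustrative, and the dropout layers are omitted since they act as identity at inference:

    import torch
    import torch.nn.functional as F

    def window_attention(x, qkv_w, qkv_b, proj_w, proj_b, rel_bias_table, rel_index,
                         num_heads: int = 6):
        # x: (B_, N, C) = (36, 64, 192); head_dim = 32, scale = 32 ** -0.5
        B_, N, C = x.shape
        head_dim = C // num_heads
        qkv = F.linear(x, qkv_w, qkv_b)                        # (36, 64, 576)
        qkv = qkv.reshape(B_, N, 3, num_heads, head_dim).permute(2, 0, 3, 1, 4)
        q, k, v = qkv[0], qkv[1], qkv[2]                       # each (36, 6, 64, 32)
        attn = (q * head_dim ** -0.5) @ k.transpose(-2, -1)    # (36, 6, 64, 64)
        # relative position bias: (225, 6) table gathered with the flattened (64, 64) index
        bias = rel_bias_table[rel_index.view(-1)].view(N, N, num_heads)
        attn = attn + bias.permute(2, 0, 1).contiguous().unsqueeze(0)
        attn = attn.softmax(dim=-1)
        out = (attn @ v).transpose(1, 2).reshape(B_, N, C)     # (36, 64, 192)
        return F.linear(out, proj_w, proj_b)

    x = torch.randn(36, 64, 192)
    out = window_attention(x, torch.randn(576, 192), torch.randn(576),
                           torch.randn(192, 192), torch.randn(192),
                           torch.randn(225, 6), torch.randint(0, 225, (64, 64)))
    print(out.shape)  # torch.Size([36, 64, 192])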
nn.LayerNorm layers_dfe.3.residual_group.blocks.0.norm2 1 1 input.251 11257 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.251=(1,2304,192)f32 #11257=(1,2304,192)f32 nn.Linear layers_dfe.3.residual_group.blocks.0.mlp.fc1 1 1 11257 11262 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #11257=(1,2304,192)f32 #11262=(1,2304,384)f32 nn.GELU layers_dfe.3.residual_group.blocks.0.mlp.act 1 1 11262 11263 #11262=(1,2304,384)f32 #11263=(1,2304,384)f32 nn.Dropout layers_dfe.3.residual_group.blocks.0.mlp.drop 1 1 11263 11264 #11263=(1,2304,384)f32 #11264=(1,2304,384)f32 nn.Linear layers_dfe.3.residual_group.blocks.0.mlp.fc2 1 1 11264 11265 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #11264=(1,2304,384)f32 #11265=(1,2304,192)f32 nn.Dropout layers_dfe.3.residual_group.blocks.0.mlp.drop 1 1 11265 11266 #11265=(1,2304,192)f32 #11266=(1,2304,192)f32 prim::Constant pnnx_11174 0 1 11267 value=None prim::Constant pnnx_11175 0 1 23526 value=1 aten::add pnnx_11176 3 1 input.251 11266 23526 11268 #input.251=(1,2304,192)f32 #11266=(1,2304,192)f32 #11268=(1,2304,192)f32 prim::Constant pnnx_11177 0 1 11269 value=trunc prim::Constant pnnx_11178 0 1 11270 value=8 prim::Constant pnnx_11179 0 1 11271 value=0 prim::Constant pnnx_11180 0 1 11272 value=2 prim::Constant pnnx_11181 0 1 11273 value=-4 prim::Constant pnnx_11182 0 1 11274 value=1 prim::Constant pnnx_11183 0 1 11275 value=3 prim::Constant pnnx_11184 0 1 11276 value=8 prim::Constant pnnx_11185 0 1 11277 value=4 prim::Constant pnnx_11186 0 1 11278 value=5 prim::Constant pnnx_11187 0 1 11279 value=-1 prim::Constant pnnx_11188 0 1 11280 value=64 pnnx.Attribute layers_dfe.3.residual_group.blocks.1 0 1 attn_mask.57 @attn_mask=(36,64,64)f32 #attn_mask.57=(36,64,64)f32 aten::size pnnx_11189 2 1 11268 11271 11287 #11268=(1,2304,192)f32 prim::NumToTensor pnnx_11190 1 1 11287 B.135 aten::Int pnnx_11191 1 1 B.135 11289 aten::Int pnnx_11192 1 1 B.135 11290 aten::size pnnx_11193 2 1 11268 11272 11291 #11268=(1,2304,192)f32 prim::NumToTensor pnnx_11194 1 1 11291 C.231 aten::Int pnnx_11195 1 1 C.231 11293 aten::Int pnnx_11196 1 1 C.231 11294 aten::Int pnnx_11197 1 1 C.231 11295 aten::Int pnnx_11198 1 1 C.231 11296 nn.LayerNorm layers_dfe.3.residual_group.blocks.1.norm1 1 1 11268 11297 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #11268=(1,2304,192)f32 #11297=(1,2304,192)f32 prim::ListConstruct pnnx_11199 4 1 11290 959 1199 11296 11298 prim::Constant pnnx_11201 0 1 23527 value=-4 prim::ListConstruct pnnx_11202 2 1 11273 23527 11300 prim::Constant pnnx_11203 0 1 23528 value=2 prim::ListConstruct pnnx_11204 2 1 11274 23528 11301 Tensor.view Tensor.view_1588 2 1 11297 11298 x.113 $input=11297 $shape=11298 #11297=(1,2304,192)f32 #x.113=(1,48,48,192)f32 prim::Constant pnnx_11206 0 1 23529 value=0 torch.roll torch.roll_2474 3 1 x.113 11300 11301 x6.41 $input=x.113 $shifts=11300 $dims=11301 #x.113=(1,48,48,192)f32 #x6.41=(1,48,48,192)f32 aten::size pnnx_11207 2 1 x6.41 23529 11303 #x6.41=(1,48,48,192)f32 prim::NumToTensor pnnx_11208 1 1 11303 B1.41 aten::Int pnnx_11209 1 1 B1.41 11305 prim::Constant pnnx_11210 0 1 23530 value=1 aten::size pnnx_11211 2 1 x6.41 23530 11306 #x6.41=(1,48,48,192)f32 prim::NumToTensor pnnx_11212 1 1 11306 11307 prim::Constant pnnx_11213 0 1 23531 value=2 aten::size pnnx_11214 2 1 x6.41 23531 11308 #x6.41=(1,48,48,192)f32 prim::NumToTensor pnnx_11215 1 1 11308 11309 aten::size pnnx_11216 
2 1 x6.41 11275 11310 #x6.41=(1,48,48,192)f32 prim::NumToTensor pnnx_11217 1 1 11310 C1.41 aten::Int pnnx_11218 1 1 C1.41 11312 aten::Int pnnx_11219 1 1 C1.41 11313 aten::div pnnx_11220 3 1 11307 11270 11269 11314 aten::Int pnnx_11221 1 1 11314 11315 prim::Constant pnnx_11222 0 1 23532 value=8 prim::Constant pnnx_11223 0 1 23533 value=trunc aten::div pnnx_11224 3 1 11309 23532 23533 11316 aten::Int pnnx_11225 1 1 11316 11317 prim::Constant pnnx_11226 0 1 23534 value=8 prim::ListConstruct pnnx_11227 6 1 11305 11315 11276 11317 23534 11313 11318 prim::Constant pnnx_11229 0 1 23535 value=0 prim::Constant pnnx_11230 0 1 23536 value=1 prim::Constant pnnx_11231 0 1 23537 value=3 prim::Constant pnnx_11232 0 1 23538 value=2 prim::ListConstruct pnnx_11233 6 1 23535 23536 23537 23538 11277 11278 11320 Tensor.view Tensor.view_1589 2 1 x6.41 11318 x7.41 $input=x6.41 $shape=11318 #x6.41=(1,48,48,192)f32 #x7.41=(1,6,8,6,8,192)f32 prim::Constant pnnx_11237 0 1 23540 value=8 prim::Constant pnnx_11238 0 1 23541 value=8 prim::ListConstruct pnnx_11239 4 1 11279 23540 23541 11312 11323 torch.permute torch.permute_2748 2 1 x7.41 11320 11321 $input=x7.41 $dims=11320 #x7.41=(1,6,8,6,8,192)f32 #11321=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_167 1 1 11321 11322 memory_format=torch.contiguous_format $input=11321 #11321=(1,6,6,8,8,192)f32 #11322=(1,6,6,8,8,192)f32 prim::Constant pnnx_11241 0 1 23542 value=-1 prim::ListConstruct pnnx_11242 3 1 23542 11280 11295 11325 prim::Constant pnnx_11244 0 1 11327 value=1.767767e-01 prim::Constant pnnx_11245 0 1 11328 value=trunc prim::Constant pnnx_11246 0 1 11329 value=6 prim::Constant pnnx_11247 0 1 11330 value=0 prim::Constant pnnx_11248 0 1 11331 value=1 prim::Constant pnnx_11249 0 1 11332 value=2 prim::Constant pnnx_11250 0 1 11333 value=3 prim::Constant pnnx_11251 0 1 11334 value=6 prim::Constant pnnx_11252 0 1 11335 value=4 prim::Constant pnnx_11253 0 1 11336 value=-2 prim::Constant pnnx_11254 0 1 11337 value=-1 prim::Constant pnnx_11255 0 1 11338 value=64 pnnx.Attribute layers_dfe.3.residual_group.blocks.1.attn 0 1 relative_position_bias_table.113 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.113=(225,6)f32 pnnx.Attribute layers_dfe.3.residual_group.blocks.1.attn 0 1 relative_position_index.113 @relative_position_index=(64,64)i64 #relative_position_index.113=(64,64)i64 Tensor.view Tensor.view_1590 2 1 11322 11323 x_windows.113 $input=11322 $shape=11323 #11322=(1,6,6,8,8,192)f32 #x_windows.113=(36,8,8,192)f32 Tensor.view Tensor.view_1591 2 1 x_windows.113 11325 x8.41 $input=x_windows.113 $shape=11325 #x_windows.113=(36,8,8,192)f32 #x8.41=(36,64,192)f32 aten::size pnnx_11256 2 1 x8.41 11330 11346 #x8.41=(36,64,192)f32 prim::NumToTensor pnnx_11257 1 1 11346 B_.113 aten::Int pnnx_11258 1 1 B_.113 11348 aten::Int pnnx_11259 1 1 B_.113 11349 aten::size pnnx_11260 2 1 x8.41 11331 11350 #x8.41=(36,64,192)f32 prim::NumToTensor pnnx_11261 1 1 11350 N.113 aten::Int pnnx_11262 1 1 N.113 11352 aten::Int pnnx_11263 1 1 N.113 11353 aten::Int pnnx_11264 1 1 N.113 11354 aten::Int pnnx_11265 1 1 N.113 11355 aten::Int pnnx_11266 1 1 N.113 11356 aten::Int pnnx_11267 1 1 N.113 11357 aten::size pnnx_11268 2 1 x8.41 11332 11358 #x8.41=(36,64,192)f32 prim::NumToTensor pnnx_11269 1 1 11358 C.233 aten::Int pnnx_11270 1 1 C.233 11360 nn.Linear layers_dfe.3.residual_group.blocks.1.attn.qkv 1 1 x8.41 11361 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x8.41=(36,64,192)f32 #11361=(36,64,576)f32 aten::div pnnx_11271 3 1 C.233 
11329 11328 11362 aten::Int pnnx_11272 1 1 11362 11363 prim::ListConstruct pnnx_11273 5 1 11349 11357 11333 11334 11363 11364 prim::Constant pnnx_11275 0 1 23543 value=2 prim::Constant pnnx_11276 0 1 23544 value=0 prim::Constant pnnx_11277 0 1 23545 value=3 prim::Constant pnnx_11278 0 1 23546 value=1 prim::ListConstruct pnnx_11279 5 1 23543 23544 23545 23546 11335 11366 Tensor.reshape Tensor.reshape_544 2 1 11361 11364 11365 $input=11361 $shape=11364 #11361=(36,64,576)f32 #11365=(36,64,3,6,32)f32 prim::Constant pnnx_11281 0 1 23547 value=0 prim::Constant pnnx_11282 0 1 23548 value=0 prim::Constant pnnx_11284 0 1 23549 value=0 prim::Constant pnnx_11285 0 1 23550 value=1 prim::Constant pnnx_11287 0 1 23551 value=0 prim::Constant pnnx_11288 0 1 23552 value=2 torch.permute torch.permute_2749 2 1 11365 11366 qkv1.41 $input=11365 $dims=11366 #11365=(36,64,3,6,32)f32 #qkv1.41=(3,36,6,64,32)f32 Tensor.select Tensor.select_815 3 1 qkv1.41 23547 23548 q.113 $input=qkv1.41 $dim=23547 $index=23548 #qkv1.41=(3,36,6,64,32)f32 #q.113=(36,6,64,32)f32 aten::mul pnnx_11290 2 1 q.113 11327 q1.41 #q.113=(36,6,64,32)f32 #q1.41=(36,6,64,32)f32 Tensor.select Tensor.select_816 3 1 qkv1.41 23549 23550 k.113 $input=qkv1.41 $dim=23549 $index=23550 #qkv1.41=(3,36,6,64,32)f32 #k.113=(36,6,64,32)f32 prim::Constant pnnx_11293 0 1 23553 value=-1 prim::ListConstruct pnnx_11294 1 1 23553 11374 Tensor.view Tensor.view_1592 2 1 relative_position_index.113 11374 11375 $input=relative_position_index.113 $shape=11374 #relative_position_index.113=(64,64)i64 #11375=(4096)i64 prim::ListConstruct pnnx_11296 1 1 11375 11376 #11375=(4096)i64 prim::Constant pnnx_11298 0 1 23554 value=64 prim::Constant pnnx_11299 0 1 23555 value=-1 prim::ListConstruct pnnx_11300 3 1 11338 23554 23555 11378 Tensor.index Tensor.index_381 2 1 relative_position_bias_table.113 11376 11377 $input=relative_position_bias_table.113 $expr=11376 #relative_position_bias_table.113=(225,6)f32 #11377=(4096,6)f32 prim::Constant pnnx_11302 0 1 23556 value=2 prim::Constant pnnx_11303 0 1 23557 value=0 prim::Constant pnnx_11304 0 1 23558 value=1 prim::ListConstruct pnnx_11305 3 1 23556 23557 23558 11380 Tensor.view Tensor.view_1593 2 1 11377 11378 relative_position_bias.113 $input=11377 $shape=11378 #11377=(4096,6)f32 #relative_position_bias.113=(64,64,6)f32 prim::Constant pnnx_11309 0 1 23560 value=0 torch.permute torch.permute_2750 2 1 relative_position_bias.113 11380 11381 $input=relative_position_bias.113 $dims=11380 #relative_position_bias.113=(64,64,6)f32 #11381=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_168 1 1 11381 relative_position_bias1.41 memory_format=torch.contiguous_format $input=11381 #11381=(6,64,64)f32 #relative_position_bias1.41=(6,64,64)f32 prim::Constant pnnx_11311 0 1 23561 value=1 torch.transpose torch.transpose_3091 3 1 k.113 11336 11337 11372 $input=k.113 $dim0=11336 $dim1=11337 #k.113=(36,6,64,32)f32 #11372=(36,6,32,64)f32 torch.matmul torch.matmul_2314 2 1 q1.41 11372 attn.227 $input=q1.41 $other=11372 #q1.41=(36,6,64,32)f32 #11372=(36,6,32,64)f32 #attn.227=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3327 2 1 relative_position_bias1.41 23560 11383 $input=relative_position_bias1.41 $dim=23560 #relative_position_bias1.41=(6,64,64)f32 #11383=(1,6,64,64)f32 aten::add pnnx_11312 3 1 attn.227 11383 23561 attn2.21 #attn.227=(36,6,64,64)f32 #11383=(1,6,64,64)f32 #attn2.21=(36,6,64,64)f32 prim::Constant pnnx_11313 0 1 23562 value=0 aten::size pnnx_11314 2 1 attn_mask.57 23562 11385 #attn_mask.57=(36,64,64)f32 prim::NumToTensor pnnx_11315 1 1 
11385 other.57 aten::Int pnnx_11316 1 1 other.57 11387 prim::Constant pnnx_11317 0 1 23563 value=trunc aten::div pnnx_11318 3 1 B_.113 other.57 23563 11388 aten::Int pnnx_11319 1 1 11388 11389 prim::Constant pnnx_11320 0 1 23564 value=6 prim::ListConstruct pnnx_11321 5 1 11389 11387 23564 11356 11355 11390 prim::Constant pnnx_11323 0 1 23565 value=1 prim::Constant pnnx_11325 0 1 23566 value=0 prim::Constant pnnx_11327 0 1 23567 value=1 Tensor.view Tensor.view_1594 2 1 attn2.21 11390 11391 $input=attn2.21 $shape=11390 #attn2.21=(36,6,64,64)f32 #11391=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3328 2 1 attn_mask.57 23565 11392 $input=attn_mask.57 $dim=23565 #attn_mask.57=(36,64,64)f32 #11392=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3329 2 1 11392 23566 11393 $input=11392 $dim=23566 #11392=(36,1,64,64)f32 #11393=(1,36,1,64,64)f32 aten::add pnnx_11328 3 1 11391 11393 23567 attn3.21 #11391=(1,36,6,64,64)f32 #11393=(1,36,1,64,64)f32 #attn3.21=(1,36,6,64,64)f32 prim::Constant pnnx_11329 0 1 23568 value=-1 prim::Constant pnnx_11330 0 1 23569 value=6 prim::ListConstruct pnnx_11331 4 1 23568 23569 11354 11353 11395 Tensor.view Tensor.view_1595 2 1 attn3.21 11395 input.253 $input=attn3.21 $shape=11395 #attn3.21=(1,36,6,64,64)f32 #input.253=(36,6,64,64)f32 nn.Softmax layers_dfe.3.residual_group.blocks.1.attn.softmax 1 1 input.253 11397 dim=-1 #input.253=(36,6,64,64)f32 #11397=(36,6,64,64)f32 nn.Dropout layers_dfe.3.residual_group.blocks.1.attn.attn_drop 1 1 11397 11398 #11397=(36,6,64,64)f32 #11398=(36,6,64,64)f32 Tensor.select Tensor.select_817 3 1 qkv1.41 23551 23552 v.113 $input=qkv1.41 $dim=23551 $index=23552 #qkv1.41=(3,36,6,64,32)f32 #v.113=(36,6,64,32)f32 prim::Constant pnnx_11334 0 1 23570 value=1 prim::Constant pnnx_11335 0 1 23571 value=2 torch.matmul torch.matmul_2315 2 1 11398 v.113 11399 $input=11398 $other=v.113 #11398=(36,6,64,64)f32 #v.113=(36,6,64,32)f32 #11399=(36,6,64,32)f32 prim::ListConstruct pnnx_11337 3 1 11348 11352 11360 11401 torch.transpose torch.transpose_3092 3 1 11399 23570 23571 11400 $input=11399 $dim0=23570 $dim1=23571 #11399=(36,6,64,32)f32 #11400=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_545 2 1 11400 11401 input1.43 $input=11400 $shape=11401 #11400=(36,64,6,32)f32 #input1.43=(36,64,192)f32 nn.Linear layers_dfe.3.residual_group.blocks.1.attn.proj 1 1 input1.43 11403 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.43=(36,64,192)f32 #11403=(36,64,192)f32 nn.Dropout layers_dfe.3.residual_group.blocks.1.attn.proj_drop 1 1 11403 11404 #11403=(36,64,192)f32 #11404=(36,64,192)f32 prim::Constant pnnx_11339 0 1 23572 value=-1 prim::Constant pnnx_11340 0 1 23573 value=8 prim::Constant pnnx_11341 0 1 23574 value=8 prim::ListConstruct pnnx_11342 4 1 23572 23573 23574 11294 11405 prim::Constant pnnx_11344 0 1 23575 value=8 prim::Constant pnnx_11345 0 1 23576 value=trunc aten::div pnnx_11346 3 1 H0.1 23575 23576 11407 aten::Int pnnx_11347 1 1 11407 11408 prim::Constant pnnx_11348 0 1 23577 value=8 prim::Constant pnnx_11349 0 1 23578 value=trunc aten::div pnnx_11350 3 1 W0.1 23577 23578 11409 aten::Int pnnx_11351 1 1 11409 11410 prim::Constant pnnx_11352 0 1 23579 value=1 prim::Constant pnnx_11353 0 1 23580 value=8 prim::Constant pnnx_11354 0 1 23581 value=8 prim::Constant pnnx_11355 0 1 23582 value=-1 prim::ListConstruct pnnx_11356 6 1 23579 11408 11410 23580 23581 23582 11411 prim::Constant pnnx_11358 0 1 23583 value=0 prim::Constant pnnx_11359 0 1 23584 value=1 prim::Constant pnnx_11360 0 1 23585 value=3 prim::Constant 
pnnx_11361 0 1 23586 value=2 prim::Constant pnnx_11362 0 1 23587 value=4 prim::Constant pnnx_11363 0 1 23588 value=5 prim::ListConstruct pnnx_11364 6 1 23583 23584 23585 23586 23587 23588 11413 Tensor.view Tensor.view_1596 2 1 11404 11405 windows.113 $input=11404 $shape=11405 #11404=(36,64,192)f32 #windows.113=(36,8,8,192)f32 Tensor.view Tensor.view_1597 2 1 windows.113 11411 x9.41 $input=windows.113 $shape=11411 #windows.113=(36,8,8,192)f32 #x9.41=(1,6,6,8,8,192)f32 prim::Constant pnnx_11368 0 1 23590 value=1 prim::Constant pnnx_11369 0 1 23591 value=-1 prim::ListConstruct pnnx_11370 4 1 23590 956 1196 23591 11416 torch.permute torch.permute_2751 2 1 x9.41 11413 11414 $input=x9.41 $dims=11413 #x9.41=(1,6,6,8,8,192)f32 #11414=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_169 1 1 11414 11415 memory_format=torch.contiguous_format $input=11414 #11414=(1,6,8,6,8,192)f32 #11415=(1,6,8,6,8,192)f32 prim::Constant pnnx_11372 0 1 23592 value=4 prim::Constant pnnx_11373 0 1 23593 value=4 prim::ListConstruct pnnx_11374 2 1 23592 23593 11418 prim::Constant pnnx_11375 0 1 23594 value=1 prim::Constant pnnx_11376 0 1 23595 value=2 prim::ListConstruct pnnx_11377 2 1 23594 23595 11419 Tensor.view Tensor.view_1598 2 1 11415 11416 shifted_x.57 $input=11415 $shape=11416 #11415=(1,6,8,6,8,192)f32 #shifted_x.57=(1,48,48,192)f32 aten::mul pnnx_11379 2 1 H0.1 W0.1 11421 aten::Int pnnx_11380 1 1 11421 11422 prim::ListConstruct pnnx_11381 3 1 11289 11422 11293 11423 prim::Constant pnnx_11383 0 1 11425 value=None prim::Constant pnnx_11384 0 1 23596 value=1 torch.roll torch.roll_2475 3 1 shifted_x.57 11418 11419 x10.21 $input=shifted_x.57 $shifts=11418 $dims=11419 #shifted_x.57=(1,48,48,192)f32 #x10.21=(1,48,48,192)f32 Tensor.view Tensor.view_1599 2 1 x10.21 11423 x11.21 $input=x10.21 $shape=11423 #x10.21=(1,48,48,192)f32 #x11.21=(1,2304,192)f32 aten::add pnnx_11385 3 1 11268 x11.21 23596 input.255 #11268=(1,2304,192)f32 #x11.21=(1,2304,192)f32 #input.255=(1,2304,192)f32 nn.LayerNorm layers_dfe.3.residual_group.blocks.1.norm2 1 1 input.255 11427 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.255=(1,2304,192)f32 #11427=(1,2304,192)f32 nn.Linear layers_dfe.3.residual_group.blocks.1.mlp.fc1 1 1 11427 11432 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #11427=(1,2304,192)f32 #11432=(1,2304,384)f32 nn.GELU layers_dfe.3.residual_group.blocks.1.mlp.act 1 1 11432 11433 #11432=(1,2304,384)f32 #11433=(1,2304,384)f32 nn.Dropout layers_dfe.3.residual_group.blocks.1.mlp.drop 1 1 11433 11434 #11433=(1,2304,384)f32 #11434=(1,2304,384)f32 nn.Linear layers_dfe.3.residual_group.blocks.1.mlp.fc2 1 1 11434 11435 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #11434=(1,2304,384)f32 #11435=(1,2304,192)f32 nn.Dropout layers_dfe.3.residual_group.blocks.1.mlp.drop 1 1 11435 11436 #11435=(1,2304,192)f32 #11436=(1,2304,192)f32 prim::Constant pnnx_11386 0 1 11437 value=None prim::Constant pnnx_11387 0 1 23597 value=1 aten::add pnnx_11388 3 1 input.255 11436 23597 11438 #input.255=(1,2304,192)f32 #11436=(1,2304,192)f32 #11438=(1,2304,192)f32 prim::Constant pnnx_11389 0 1 11439 value=trunc prim::Constant pnnx_11390 0 1 11440 value=8 prim::Constant pnnx_11391 0 1 11441 value=0 prim::Constant pnnx_11392 0 1 11442 value=2 prim::Constant pnnx_11393 0 1 11443 value=1 prim::Constant pnnx_11394 0 1 11444 value=3 prim::Constant pnnx_11395 0 1 11445 value=8 prim::Constant pnnx_11396 0 1 11446 value=4 prim::Constant pnnx_11397 0 
1 11447 value=5 prim::Constant pnnx_11398 0 1 11448 value=-1 prim::Constant pnnx_11399 0 1 11449 value=64 aten::size pnnx_11400 2 1 11438 11441 11455 #11438=(1,2304,192)f32 prim::NumToTensor pnnx_11401 1 1 11455 B.137 aten::Int pnnx_11402 1 1 B.137 11457 aten::Int pnnx_11403 1 1 B.137 11458 aten::size pnnx_11404 2 1 11438 11442 11459 #11438=(1,2304,192)f32 prim::NumToTensor pnnx_11405 1 1 11459 C.235 aten::Int pnnx_11406 1 1 C.235 11461 aten::Int pnnx_11407 1 1 C.235 11462 aten::Int pnnx_11408 1 1 C.235 11463 aten::Int pnnx_11409 1 1 C.235 11464 nn.LayerNorm layers_dfe.3.residual_group.blocks.2.norm1 1 1 11438 11465 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #11438=(1,2304,192)f32 #11465=(1,2304,192)f32 prim::ListConstruct pnnx_11410 4 1 11458 953 1193 11464 11466 prim::Constant pnnx_11412 0 1 23598 value=0 Tensor.view Tensor.view_1600 2 1 11465 11466 x.115 $input=11465 $shape=11466 #11465=(1,2304,192)f32 #x.115=(1,48,48,192)f32 aten::size pnnx_11413 2 1 x.115 23598 11468 #x.115=(1,48,48,192)f32 prim::NumToTensor pnnx_11414 1 1 11468 B1.43 aten::Int pnnx_11415 1 1 B1.43 11470 aten::size pnnx_11416 2 1 x.115 11443 11471 #x.115=(1,48,48,192)f32 prim::NumToTensor pnnx_11417 1 1 11471 11472 prim::Constant pnnx_11418 0 1 23599 value=2 aten::size pnnx_11419 2 1 x.115 23599 11473 #x.115=(1,48,48,192)f32 prim::NumToTensor pnnx_11420 1 1 11473 11474 aten::size pnnx_11421 2 1 x.115 11444 11475 #x.115=(1,48,48,192)f32 prim::NumToTensor pnnx_11422 1 1 11475 C1.43 aten::Int pnnx_11423 1 1 C1.43 11477 aten::Int pnnx_11424 1 1 C1.43 11478 aten::div pnnx_11425 3 1 11472 11440 11439 11479 aten::Int pnnx_11426 1 1 11479 11480 prim::Constant pnnx_11427 0 1 23600 value=8 prim::Constant pnnx_11428 0 1 23601 value=trunc aten::div pnnx_11429 3 1 11474 23600 23601 11481 aten::Int pnnx_11430 1 1 11481 11482 prim::Constant pnnx_11431 0 1 23602 value=8 prim::ListConstruct pnnx_11432 6 1 11470 11480 11445 11482 23602 11478 11483 prim::Constant pnnx_11434 0 1 23603 value=0 prim::Constant pnnx_11435 0 1 23604 value=1 prim::Constant pnnx_11436 0 1 23605 value=3 prim::Constant pnnx_11437 0 1 23606 value=2 prim::ListConstruct pnnx_11438 6 1 23603 23604 23605 23606 11446 11447 11485 Tensor.view Tensor.view_1601 2 1 x.115 11483 x5.59 $input=x.115 $shape=11483 #x.115=(1,48,48,192)f32 #x5.59=(1,6,8,6,8,192)f32 prim::Constant pnnx_11442 0 1 23608 value=8 prim::Constant pnnx_11443 0 1 23609 value=8 prim::ListConstruct pnnx_11444 4 1 11448 23608 23609 11477 11488 torch.permute torch.permute_2752 2 1 x5.59 11485 11486 $input=x5.59 $dims=11485 #x5.59=(1,6,8,6,8,192)f32 #11486=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_170 1 1 11486 11487 memory_format=torch.contiguous_format $input=11486 #11486=(1,6,6,8,8,192)f32 #11487=(1,6,6,8,8,192)f32 prim::Constant pnnx_11446 0 1 23610 value=-1 prim::ListConstruct pnnx_11447 3 1 23610 11449 11463 11490 prim::Constant pnnx_11449 0 1 11492 value=1.767767e-01 prim::Constant pnnx_11450 0 1 11493 value=trunc prim::Constant pnnx_11451 0 1 11494 value=6 prim::Constant pnnx_11452 0 1 11495 value=0 prim::Constant pnnx_11453 0 1 11496 value=1 prim::Constant pnnx_11454 0 1 11497 value=2 prim::Constant pnnx_11455 0 1 11498 value=3 prim::Constant pnnx_11456 0 1 11499 value=6 prim::Constant pnnx_11457 0 1 11500 value=4 prim::Constant pnnx_11458 0 1 11501 value=-2 prim::Constant pnnx_11459 0 1 11502 value=-1 prim::Constant pnnx_11460 0 1 11503 value=64 pnnx.Attribute layers_dfe.3.residual_group.blocks.2.attn 0 1 
relative_position_bias_table.115 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.115=(225,6)f32 pnnx.Attribute layers_dfe.3.residual_group.blocks.2.attn 0 1 relative_position_index.115 @relative_position_index=(64,64)i64 #relative_position_index.115=(64,64)i64 Tensor.view Tensor.view_1602 2 1 11487 11488 x_windows.115 $input=11487 $shape=11488 #11487=(1,6,6,8,8,192)f32 #x_windows.115=(36,8,8,192)f32 Tensor.view Tensor.view_1603 2 1 x_windows.115 11490 x6.43 $input=x_windows.115 $shape=11490 #x_windows.115=(36,8,8,192)f32 #x6.43=(36,64,192)f32 aten::size pnnx_11461 2 1 x6.43 11495 11511 #x6.43=(36,64,192)f32 prim::NumToTensor pnnx_11462 1 1 11511 B_.115 aten::Int pnnx_11463 1 1 B_.115 11513 aten::Int pnnx_11464 1 1 B_.115 11514 aten::size pnnx_11465 2 1 x6.43 11496 11515 #x6.43=(36,64,192)f32 prim::NumToTensor pnnx_11466 1 1 11515 N.115 aten::Int pnnx_11467 1 1 N.115 11517 aten::Int pnnx_11468 1 1 N.115 11518 aten::size pnnx_11469 2 1 x6.43 11497 11519 #x6.43=(36,64,192)f32 prim::NumToTensor pnnx_11470 1 1 11519 C.237 aten::Int pnnx_11471 1 1 C.237 11521 nn.Linear layers_dfe.3.residual_group.blocks.2.attn.qkv 1 1 x6.43 11522 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x6.43=(36,64,192)f32 #11522=(36,64,576)f32 aten::div pnnx_11472 3 1 C.237 11494 11493 11523 aten::Int pnnx_11473 1 1 11523 11524 prim::ListConstruct pnnx_11474 5 1 11514 11518 11498 11499 11524 11525 prim::Constant pnnx_11476 0 1 23611 value=2 prim::Constant pnnx_11477 0 1 23612 value=0 prim::Constant pnnx_11478 0 1 23613 value=3 prim::Constant pnnx_11479 0 1 23614 value=1 prim::ListConstruct pnnx_11480 5 1 23611 23612 23613 23614 11500 11527 Tensor.reshape Tensor.reshape_546 2 1 11522 11525 11526 $input=11522 $shape=11525 #11522=(36,64,576)f32 #11526=(36,64,3,6,32)f32 prim::Constant pnnx_11482 0 1 23615 value=0 prim::Constant pnnx_11483 0 1 23616 value=0 prim::Constant pnnx_11485 0 1 23617 value=0 prim::Constant pnnx_11486 0 1 23618 value=1 prim::Constant pnnx_11488 0 1 23619 value=0 prim::Constant pnnx_11489 0 1 23620 value=2 torch.permute torch.permute_2753 2 1 11526 11527 qkv1.43 $input=11526 $dims=11527 #11526=(36,64,3,6,32)f32 #qkv1.43=(3,36,6,64,32)f32 Tensor.select Tensor.select_818 3 1 qkv1.43 23615 23616 q.115 $input=qkv1.43 $dim=23615 $index=23616 #qkv1.43=(3,36,6,64,32)f32 #q.115=(36,6,64,32)f32 aten::mul pnnx_11491 2 1 q.115 11492 q1.43 #q.115=(36,6,64,32)f32 #q1.43=(36,6,64,32)f32 Tensor.select Tensor.select_819 3 1 qkv1.43 23617 23618 k.115 $input=qkv1.43 $dim=23617 $index=23618 #qkv1.43=(3,36,6,64,32)f32 #k.115=(36,6,64,32)f32 prim::Constant pnnx_11494 0 1 23621 value=-1 prim::ListConstruct pnnx_11495 1 1 23621 11535 Tensor.view Tensor.view_1604 2 1 relative_position_index.115 11535 11536 $input=relative_position_index.115 $shape=11535 #relative_position_index.115=(64,64)i64 #11536=(4096)i64 prim::ListConstruct pnnx_11497 1 1 11536 11537 #11536=(4096)i64 prim::Constant pnnx_11499 0 1 23622 value=64 prim::Constant pnnx_11500 0 1 23623 value=-1 prim::ListConstruct pnnx_11501 3 1 11503 23622 23623 11539 Tensor.index Tensor.index_382 2 1 relative_position_bias_table.115 11537 11538 $input=relative_position_bias_table.115 $expr=11537 #relative_position_bias_table.115=(225,6)f32 #11538=(4096,6)f32 prim::Constant pnnx_11503 0 1 23624 value=2 prim::Constant pnnx_11504 0 1 23625 value=0 prim::Constant pnnx_11505 0 1 23626 value=1 prim::ListConstruct pnnx_11506 3 1 23624 23625 23626 11541 Tensor.view Tensor.view_1605 2 1 11538 11539 relative_position_bias.115 
$input=11538 $shape=11539 #11538=(4096,6)f32 #relative_position_bias.115=(64,64,6)f32 prim::Constant pnnx_11510 0 1 23628 value=0 torch.permute torch.permute_2754 2 1 relative_position_bias.115 11541 11542 $input=relative_position_bias.115 $dims=11541 #relative_position_bias.115=(64,64,6)f32 #11542=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_171 1 1 11542 relative_position_bias1.43 memory_format=torch.contiguous_format $input=11542 #11542=(6,64,64)f32 #relative_position_bias1.43=(6,64,64)f32 prim::Constant pnnx_11512 0 1 23629 value=1 torch.transpose torch.transpose_3093 3 1 k.115 11501 11502 11533 $input=k.115 $dim0=11501 $dim1=11502 #k.115=(36,6,64,32)f32 #11533=(36,6,32,64)f32 torch.matmul torch.matmul_2316 2 1 q1.43 11533 attn.231 $input=q1.43 $other=11533 #q1.43=(36,6,64,32)f32 #11533=(36,6,32,64)f32 #attn.231=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3330 2 1 relative_position_bias1.43 23628 11544 $input=relative_position_bias1.43 $dim=23628 #relative_position_bias1.43=(6,64,64)f32 #11544=(1,6,64,64)f32 aten::add pnnx_11513 3 1 attn.231 11544 23629 input.257 #attn.231=(36,6,64,64)f32 #11544=(1,6,64,64)f32 #input.257=(36,6,64,64)f32 nn.Softmax layers_dfe.3.residual_group.blocks.2.attn.softmax 1 1 input.257 11546 dim=-1 #input.257=(36,6,64,64)f32 #11546=(36,6,64,64)f32 nn.Dropout layers_dfe.3.residual_group.blocks.2.attn.attn_drop 1 1 11546 11547 #11546=(36,6,64,64)f32 #11547=(36,6,64,64)f32 Tensor.select Tensor.select_820 3 1 qkv1.43 23619 23620 v.115 $input=qkv1.43 $dim=23619 $index=23620 #qkv1.43=(3,36,6,64,32)f32 #v.115=(36,6,64,32)f32 prim::Constant pnnx_11515 0 1 23630 value=1 prim::Constant pnnx_11516 0 1 23631 value=2 torch.matmul torch.matmul_2317 2 1 11547 v.115 11548 $input=11547 $other=v.115 #11547=(36,6,64,64)f32 #v.115=(36,6,64,32)f32 #11548=(36,6,64,32)f32 prim::ListConstruct pnnx_11518 3 1 11513 11517 11521 11550 torch.transpose torch.transpose_3094 3 1 11548 23630 23631 11549 $input=11548 $dim0=23630 $dim1=23631 #11548=(36,6,64,32)f32 #11549=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_547 2 1 11549 11550 input1.45 $input=11549 $shape=11550 #11549=(36,64,6,32)f32 #input1.45=(36,64,192)f32 nn.Linear layers_dfe.3.residual_group.blocks.2.attn.proj 1 1 input1.45 11552 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.45=(36,64,192)f32 #11552=(36,64,192)f32 nn.Dropout layers_dfe.3.residual_group.blocks.2.attn.proj_drop 1 1 11552 11553 #11552=(36,64,192)f32 #11553=(36,64,192)f32 prim::Constant pnnx_11520 0 1 23632 value=-1 prim::Constant pnnx_11521 0 1 23633 value=8 prim::Constant pnnx_11522 0 1 23634 value=8 prim::ListConstruct pnnx_11523 4 1 23632 23633 23634 11462 11554 prim::Constant pnnx_11525 0 1 23635 value=8 prim::Constant pnnx_11526 0 1 23636 value=trunc aten::div pnnx_11527 3 1 H0.1 23635 23636 11556 aten::Int pnnx_11528 1 1 11556 11557 prim::Constant pnnx_11529 0 1 23637 value=8 prim::Constant pnnx_11530 0 1 23638 value=trunc aten::div pnnx_11531 3 1 W0.1 23637 23638 11558 aten::Int pnnx_11532 1 1 11558 11559 prim::Constant pnnx_11533 0 1 23639 value=1 prim::Constant pnnx_11534 0 1 23640 value=8 prim::Constant pnnx_11535 0 1 23641 value=8 prim::Constant pnnx_11536 0 1 23642 value=-1 prim::ListConstruct pnnx_11537 6 1 23639 11557 11559 23640 23641 23642 11560 prim::Constant pnnx_11539 0 1 23643 value=0 prim::Constant pnnx_11540 0 1 23644 value=1 prim::Constant pnnx_11541 0 1 23645 value=3 prim::Constant pnnx_11542 0 1 23646 value=2 prim::Constant pnnx_11543 0 1 23647 value=4 prim::Constant pnnx_11544 0 1 23648 
value=5 prim::ListConstruct pnnx_11545 6 1 23643 23644 23645 23646 23647 23648 11562 Tensor.view Tensor.view_1606 2 1 11553 11554 windows.115 $input=11553 $shape=11554 #11553=(36,64,192)f32 #windows.115=(36,8,8,192)f32 Tensor.view Tensor.view_1607 2 1 windows.115 11560 x7.43 $input=windows.115 $shape=11560 #windows.115=(36,8,8,192)f32 #x7.43=(1,6,6,8,8,192)f32 prim::Constant pnnx_11549 0 1 23650 value=1 prim::Constant pnnx_11550 0 1 23651 value=-1 prim::ListConstruct pnnx_11551 4 1 23650 950 1190 23651 11565 torch.permute torch.permute_2755 2 1 x7.43 11562 11563 $input=x7.43 $dims=11562 #x7.43=(1,6,6,8,8,192)f32 #11563=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_172 1 1 11563 11564 memory_format=torch.contiguous_format $input=11563 #11563=(1,6,8,6,8,192)f32 #11564=(1,6,8,6,8,192)f32 aten::mul pnnx_11553 2 1 H0.1 W0.1 11567 aten::Int pnnx_11554 1 1 11567 11568 prim::ListConstruct pnnx_11555 3 1 11457 11568 11461 11569 prim::Constant pnnx_11557 0 1 11571 value=None prim::Constant pnnx_11558 0 1 23652 value=1 Tensor.view Tensor.view_1608 2 1 11564 11565 x8.43 $input=11564 $shape=11565 #11564=(1,6,8,6,8,192)f32 #x8.43=(1,48,48,192)f32 Tensor.view Tensor.view_1609 2 1 x8.43 11569 x9.43 $input=x8.43 $shape=11569 #x8.43=(1,48,48,192)f32 #x9.43=(1,2304,192)f32 aten::add pnnx_11559 3 1 11438 x9.43 23652 input.259 #11438=(1,2304,192)f32 #x9.43=(1,2304,192)f32 #input.259=(1,2304,192)f32 nn.LayerNorm layers_dfe.3.residual_group.blocks.2.norm2 1 1 input.259 11573 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.259=(1,2304,192)f32 #11573=(1,2304,192)f32 nn.Linear layers_dfe.3.residual_group.blocks.2.mlp.fc1 1 1 11573 11578 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #11573=(1,2304,192)f32 #11578=(1,2304,384)f32 nn.GELU layers_dfe.3.residual_group.blocks.2.mlp.act 1 1 11578 11579 #11578=(1,2304,384)f32 #11579=(1,2304,384)f32 nn.Dropout layers_dfe.3.residual_group.blocks.2.mlp.drop 1 1 11579 11580 #11579=(1,2304,384)f32 #11580=(1,2304,384)f32 nn.Linear layers_dfe.3.residual_group.blocks.2.mlp.fc2 1 1 11580 11581 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #11580=(1,2304,384)f32 #11581=(1,2304,192)f32 nn.Dropout layers_dfe.3.residual_group.blocks.2.mlp.drop 1 1 11581 11582 #11581=(1,2304,192)f32 #11582=(1,2304,192)f32 prim::Constant pnnx_11560 0 1 11583 value=None prim::Constant pnnx_11561 0 1 23653 value=1 aten::add pnnx_11562 3 1 input.259 11582 23653 11584 #input.259=(1,2304,192)f32 #11582=(1,2304,192)f32 #11584=(1,2304,192)f32 prim::Constant pnnx_11563 0 1 11585 value=trunc prim::Constant pnnx_11564 0 1 11586 value=8 prim::Constant pnnx_11565 0 1 11587 value=0 prim::Constant pnnx_11566 0 1 11588 value=2 prim::Constant pnnx_11567 0 1 11589 value=-4 prim::Constant pnnx_11568 0 1 11590 value=1 prim::Constant pnnx_11569 0 1 11591 value=3 prim::Constant pnnx_11570 0 1 11592 value=8 prim::Constant pnnx_11571 0 1 11593 value=4 prim::Constant pnnx_11572 0 1 11594 value=5 prim::Constant pnnx_11573 0 1 11595 value=-1 prim::Constant pnnx_11574 0 1 11596 value=64 pnnx.Attribute layers_dfe.3.residual_group.blocks.3 0 1 attn_mask.59 @attn_mask=(36,64,64)f32 #attn_mask.59=(36,64,64)f32 aten::size pnnx_11575 2 1 11584 11587 11603 #11584=(1,2304,192)f32 prim::NumToTensor pnnx_11576 1 1 11603 B.139 aten::Int pnnx_11577 1 1 B.139 11605 aten::Int pnnx_11578 1 1 B.139 11606 aten::size pnnx_11579 2 1 11584 11588 11607 #11584=(1,2304,192)f32 prim::NumToTensor pnnx_11580 1 1 11607 C.239 
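The operator run above for layers_dfe.3.residual_group.blocks.2.attn -- qkv Linear (192->576), reshape to (36,64,3,6,32), permute to (3,36,6,64,32), q scaled by 1.767767e-01 = 1/sqrt(32), relative_position_bias_table (225,6) gathered through relative_position_index (64,64), softmax, matmul with v, then the proj Linear -- is the flattened trace of a Swin-style window attention. Below is a minimal PyTorch sketch of the same computation, assuming dim=192, window_size=8 and num_heads=6 as the printed shapes imply; the class and argument names are illustrative rather than the exporter's source, and the attn_drop/proj_drop layers visible in the trace are omitted.

import torch
import torch.nn as nn

class WindowAttentionSketch(nn.Module):
    # Illustrative reading of the blocks.*.attn traces above (hypothetical name).
    def __init__(self, dim=192, window_size=8, num_heads=6):
        super().__init__()
        self.num_heads = num_heads
        head_dim = dim // num_heads                    # 32
        self.scale = head_dim ** -0.5                  # 0.1767767, the constant in the trace
        self.qkv = nn.Linear(dim, dim * 3)             # 192 -> 576
        self.proj = nn.Linear(dim, dim)                # 192 -> 192
        # (2*8-1)^2 x 6 table and 64x64 index, as in the pnnx.Attribute lines
        self.relative_position_bias_table = nn.Parameter(
            torch.zeros((2 * window_size - 1) ** 2, num_heads))        # (225, 6)
        self.register_buffer("relative_position_index",
            torch.zeros(window_size ** 2, window_size ** 2, dtype=torch.long))  # (64, 64)

    def forward(self, x, mask=None):                   # x: (num_windows*B, 64, 192)
        B_, N, C = x.shape
        qkv = (self.qkv(x)
               .reshape(B_, N, 3, self.num_heads, C // self.num_heads)
               .permute(2, 0, 3, 1, 4))                # (3, nW*B, 6, 64, 32)
        q, k, v = qkv[0], qkv[1], qkv[2]
        attn = (q * self.scale) @ k.transpose(-2, -1)  # (nW*B, 6, 64, 64)
        bias = self.relative_position_bias_table[
            self.relative_position_index.view(-1)].view(N, N, -1)      # (64, 64, 6)
        attn = attn + bias.permute(2, 0, 1).contiguous().unsqueeze(0)  # (1, 6, 64, 64)
        if mask is not None:                           # shifted blocks: attn_mask (36, 64, 64)
            nW = mask.shape[0]
            attn = (attn.view(B_ // nW, nW, self.num_heads, N, N)
                    + mask.unsqueeze(1).unsqueeze(0)).view(-1, self.num_heads, N, N)
        attn = attn.softmax(dim=-1)
        x = (attn @ v).transpose(1, 2).reshape(B_, N, C)
        return self.proj(x)

The mask branch corresponds to the attn_mask.* path traced for the odd-numbered blocks (blocks.3 and blocks.5); for even-numbered blocks such as blocks.2 the bias-plus-softmax path above is the whole computation.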
aten::Int pnnx_11581 1 1 C.239 11609 aten::Int pnnx_11582 1 1 C.239 11610 aten::Int pnnx_11583 1 1 C.239 11611 aten::Int pnnx_11584 1 1 C.239 11612 nn.LayerNorm layers_dfe.3.residual_group.blocks.3.norm1 1 1 11584 11613 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #11584=(1,2304,192)f32 #11613=(1,2304,192)f32 prim::ListConstruct pnnx_11585 4 1 11606 947 1187 11612 11614 prim::Constant pnnx_11587 0 1 23654 value=-4 prim::ListConstruct pnnx_11588 2 1 11589 23654 11616 prim::Constant pnnx_11589 0 1 23655 value=2 prim::ListConstruct pnnx_11590 2 1 11590 23655 11617 Tensor.view Tensor.view_1610 2 1 11613 11614 x.117 $input=11613 $shape=11614 #11613=(1,2304,192)f32 #x.117=(1,48,48,192)f32 prim::Constant pnnx_11592 0 1 23656 value=0 torch.roll torch.roll_2476 3 1 x.117 11616 11617 x6.45 $input=x.117 $shifts=11616 $dims=11617 #x.117=(1,48,48,192)f32 #x6.45=(1,48,48,192)f32 aten::size pnnx_11593 2 1 x6.45 23656 11619 #x6.45=(1,48,48,192)f32 prim::NumToTensor pnnx_11594 1 1 11619 B1.45 aten::Int pnnx_11595 1 1 B1.45 11621 prim::Constant pnnx_11596 0 1 23657 value=1 aten::size pnnx_11597 2 1 x6.45 23657 11622 #x6.45=(1,48,48,192)f32 prim::NumToTensor pnnx_11598 1 1 11622 11623 prim::Constant pnnx_11599 0 1 23658 value=2 aten::size pnnx_11600 2 1 x6.45 23658 11624 #x6.45=(1,48,48,192)f32 prim::NumToTensor pnnx_11601 1 1 11624 11625 aten::size pnnx_11602 2 1 x6.45 11591 11626 #x6.45=(1,48,48,192)f32 prim::NumToTensor pnnx_11603 1 1 11626 C1.45 aten::Int pnnx_11604 1 1 C1.45 11628 aten::Int pnnx_11605 1 1 C1.45 11629 aten::div pnnx_11606 3 1 11623 11586 11585 11630 aten::Int pnnx_11607 1 1 11630 11631 prim::Constant pnnx_11608 0 1 23659 value=8 prim::Constant pnnx_11609 0 1 23660 value=trunc aten::div pnnx_11610 3 1 11625 23659 23660 11632 aten::Int pnnx_11611 1 1 11632 11633 prim::Constant pnnx_11612 0 1 23661 value=8 prim::ListConstruct pnnx_11613 6 1 11621 11631 11592 11633 23661 11629 11634 prim::Constant pnnx_11615 0 1 23662 value=0 prim::Constant pnnx_11616 0 1 23663 value=1 prim::Constant pnnx_11617 0 1 23664 value=3 prim::Constant pnnx_11618 0 1 23665 value=2 prim::ListConstruct pnnx_11619 6 1 23662 23663 23664 23665 11593 11594 11636 Tensor.view Tensor.view_1611 2 1 x6.45 11634 x7.45 $input=x6.45 $shape=11634 #x6.45=(1,48,48,192)f32 #x7.45=(1,6,8,6,8,192)f32 prim::Constant pnnx_11623 0 1 23667 value=8 prim::Constant pnnx_11624 0 1 23668 value=8 prim::ListConstruct pnnx_11625 4 1 11595 23667 23668 11628 11639 torch.permute torch.permute_2756 2 1 x7.45 11636 11637 $input=x7.45 $dims=11636 #x7.45=(1,6,8,6,8,192)f32 #11637=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_173 1 1 11637 11638 memory_format=torch.contiguous_format $input=11637 #11637=(1,6,6,8,8,192)f32 #11638=(1,6,6,8,8,192)f32 prim::Constant pnnx_11627 0 1 23669 value=-1 prim::ListConstruct pnnx_11628 3 1 23669 11596 11611 11641 prim::Constant pnnx_11630 0 1 11643 value=1.767767e-01 prim::Constant pnnx_11631 0 1 11644 value=trunc prim::Constant pnnx_11632 0 1 11645 value=6 prim::Constant pnnx_11633 0 1 11646 value=0 prim::Constant pnnx_11634 0 1 11647 value=1 prim::Constant pnnx_11635 0 1 11648 value=2 prim::Constant pnnx_11636 0 1 11649 value=3 prim::Constant pnnx_11637 0 1 11650 value=6 prim::Constant pnnx_11638 0 1 11651 value=4 prim::Constant pnnx_11639 0 1 11652 value=-2 prim::Constant pnnx_11640 0 1 11653 value=-1 prim::Constant pnnx_11641 0 1 11654 value=64 pnnx.Attribute layers_dfe.3.residual_group.blocks.3.attn 0 1 relative_position_bias_table.117 
@relative_position_bias_table=(225,6)f32 #relative_position_bias_table.117=(225,6)f32 pnnx.Attribute layers_dfe.3.residual_group.blocks.3.attn 0 1 relative_position_index.117 @relative_position_index=(64,64)i64 #relative_position_index.117=(64,64)i64 Tensor.view Tensor.view_1612 2 1 11638 11639 x_windows.117 $input=11638 $shape=11639 #11638=(1,6,6,8,8,192)f32 #x_windows.117=(36,8,8,192)f32 Tensor.view Tensor.view_1613 2 1 x_windows.117 11641 x8.45 $input=x_windows.117 $shape=11641 #x_windows.117=(36,8,8,192)f32 #x8.45=(36,64,192)f32 aten::size pnnx_11642 2 1 x8.45 11646 11662 #x8.45=(36,64,192)f32 prim::NumToTensor pnnx_11643 1 1 11662 B_.117 aten::Int pnnx_11644 1 1 B_.117 11664 aten::Int pnnx_11645 1 1 B_.117 11665 aten::size pnnx_11646 2 1 x8.45 11647 11666 #x8.45=(36,64,192)f32 prim::NumToTensor pnnx_11647 1 1 11666 N.117 aten::Int pnnx_11648 1 1 N.117 11668 aten::Int pnnx_11649 1 1 N.117 11669 aten::Int pnnx_11650 1 1 N.117 11670 aten::Int pnnx_11651 1 1 N.117 11671 aten::Int pnnx_11652 1 1 N.117 11672 aten::Int pnnx_11653 1 1 N.117 11673 aten::size pnnx_11654 2 1 x8.45 11648 11674 #x8.45=(36,64,192)f32 prim::NumToTensor pnnx_11655 1 1 11674 C.241 aten::Int pnnx_11656 1 1 C.241 11676 nn.Linear layers_dfe.3.residual_group.blocks.3.attn.qkv 1 1 x8.45 11677 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x8.45=(36,64,192)f32 #11677=(36,64,576)f32 aten::div pnnx_11657 3 1 C.241 11645 11644 11678 aten::Int pnnx_11658 1 1 11678 11679 prim::ListConstruct pnnx_11659 5 1 11665 11673 11649 11650 11679 11680 prim::Constant pnnx_11661 0 1 23670 value=2 prim::Constant pnnx_11662 0 1 23671 value=0 prim::Constant pnnx_11663 0 1 23672 value=3 prim::Constant pnnx_11664 0 1 23673 value=1 prim::ListConstruct pnnx_11665 5 1 23670 23671 23672 23673 11651 11682 Tensor.reshape Tensor.reshape_548 2 1 11677 11680 11681 $input=11677 $shape=11680 #11677=(36,64,576)f32 #11681=(36,64,3,6,32)f32 prim::Constant pnnx_11667 0 1 23674 value=0 prim::Constant pnnx_11668 0 1 23675 value=0 prim::Constant pnnx_11670 0 1 23676 value=0 prim::Constant pnnx_11671 0 1 23677 value=1 prim::Constant pnnx_11673 0 1 23678 value=0 prim::Constant pnnx_11674 0 1 23679 value=2 torch.permute torch.permute_2757 2 1 11681 11682 qkv1.45 $input=11681 $dims=11682 #11681=(36,64,3,6,32)f32 #qkv1.45=(3,36,6,64,32)f32 Tensor.select Tensor.select_821 3 1 qkv1.45 23674 23675 q.117 $input=qkv1.45 $dim=23674 $index=23675 #qkv1.45=(3,36,6,64,32)f32 #q.117=(36,6,64,32)f32 aten::mul pnnx_11676 2 1 q.117 11643 q1.45 #q.117=(36,6,64,32)f32 #q1.45=(36,6,64,32)f32 Tensor.select Tensor.select_822 3 1 qkv1.45 23676 23677 k.117 $input=qkv1.45 $dim=23676 $index=23677 #qkv1.45=(3,36,6,64,32)f32 #k.117=(36,6,64,32)f32 prim::Constant pnnx_11679 0 1 23680 value=-1 prim::ListConstruct pnnx_11680 1 1 23680 11690 Tensor.view Tensor.view_1614 2 1 relative_position_index.117 11690 11691 $input=relative_position_index.117 $shape=11690 #relative_position_index.117=(64,64)i64 #11691=(4096)i64 prim::ListConstruct pnnx_11682 1 1 11691 11692 #11691=(4096)i64 prim::Constant pnnx_11684 0 1 23681 value=64 prim::Constant pnnx_11685 0 1 23682 value=-1 prim::ListConstruct pnnx_11686 3 1 11654 23681 23682 11694 Tensor.index Tensor.index_383 2 1 relative_position_bias_table.117 11692 11693 $input=relative_position_bias_table.117 $expr=11692 #relative_position_bias_table.117=(225,6)f32 #11693=(4096,6)f32 prim::Constant pnnx_11688 0 1 23683 value=2 prim::Constant pnnx_11689 0 1 23684 value=0 prim::Constant pnnx_11690 0 1 23685 value=1 prim::ListConstruct 
pnnx_11691 3 1 23683 23684 23685 11696 Tensor.view Tensor.view_1615 2 1 11693 11694 relative_position_bias.117 $input=11693 $shape=11694 #11693=(4096,6)f32 #relative_position_bias.117=(64,64,6)f32 prim::Constant pnnx_11695 0 1 23687 value=0 torch.permute torch.permute_2758 2 1 relative_position_bias.117 11696 11697 $input=relative_position_bias.117 $dims=11696 #relative_position_bias.117=(64,64,6)f32 #11697=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_174 1 1 11697 relative_position_bias1.45 memory_format=torch.contiguous_format $input=11697 #11697=(6,64,64)f32 #relative_position_bias1.45=(6,64,64)f32 prim::Constant pnnx_11697 0 1 23688 value=1 torch.transpose torch.transpose_3095 3 1 k.117 11652 11653 11688 $input=k.117 $dim0=11652 $dim1=11653 #k.117=(36,6,64,32)f32 #11688=(36,6,32,64)f32 torch.matmul torch.matmul_2318 2 1 q1.45 11688 attn.235 $input=q1.45 $other=11688 #q1.45=(36,6,64,32)f32 #11688=(36,6,32,64)f32 #attn.235=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3331 2 1 relative_position_bias1.45 23687 11699 $input=relative_position_bias1.45 $dim=23687 #relative_position_bias1.45=(6,64,64)f32 #11699=(1,6,64,64)f32 aten::add pnnx_11698 3 1 attn.235 11699 23688 attn2.23 #attn.235=(36,6,64,64)f32 #11699=(1,6,64,64)f32 #attn2.23=(36,6,64,64)f32 prim::Constant pnnx_11699 0 1 23689 value=0 aten::size pnnx_11700 2 1 attn_mask.59 23689 11701 #attn_mask.59=(36,64,64)f32 prim::NumToTensor pnnx_11701 1 1 11701 other.59 aten::Int pnnx_11702 1 1 other.59 11703 prim::Constant pnnx_11703 0 1 23690 value=trunc aten::div pnnx_11704 3 1 B_.117 other.59 23690 11704 aten::Int pnnx_11705 1 1 11704 11705 prim::Constant pnnx_11706 0 1 23691 value=6 prim::ListConstruct pnnx_11707 5 1 11705 11703 23691 11672 11671 11706 prim::Constant pnnx_11709 0 1 23692 value=1 prim::Constant pnnx_11711 0 1 23693 value=0 prim::Constant pnnx_11713 0 1 23694 value=1 Tensor.view Tensor.view_1616 2 1 attn2.23 11706 11707 $input=attn2.23 $shape=11706 #attn2.23=(36,6,64,64)f32 #11707=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3332 2 1 attn_mask.59 23692 11708 $input=attn_mask.59 $dim=23692 #attn_mask.59=(36,64,64)f32 #11708=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3333 2 1 11708 23693 11709 $input=11708 $dim=23693 #11708=(36,1,64,64)f32 #11709=(1,36,1,64,64)f32 aten::add pnnx_11714 3 1 11707 11709 23694 attn3.23 #11707=(1,36,6,64,64)f32 #11709=(1,36,1,64,64)f32 #attn3.23=(1,36,6,64,64)f32 prim::Constant pnnx_11715 0 1 23695 value=-1 prim::Constant pnnx_11716 0 1 23696 value=6 prim::ListConstruct pnnx_11717 4 1 23695 23696 11670 11669 11711 Tensor.view Tensor.view_1617 2 1 attn3.23 11711 input.261 $input=attn3.23 $shape=11711 #attn3.23=(1,36,6,64,64)f32 #input.261=(36,6,64,64)f32 nn.Softmax layers_dfe.3.residual_group.blocks.3.attn.softmax 1 1 input.261 11713 dim=-1 #input.261=(36,6,64,64)f32 #11713=(36,6,64,64)f32 nn.Dropout layers_dfe.3.residual_group.blocks.3.attn.attn_drop 1 1 11713 11714 #11713=(36,6,64,64)f32 #11714=(36,6,64,64)f32 Tensor.select Tensor.select_823 3 1 qkv1.45 23678 23679 v.117 $input=qkv1.45 $dim=23678 $index=23679 #qkv1.45=(3,36,6,64,32)f32 #v.117=(36,6,64,32)f32 prim::Constant pnnx_11720 0 1 23697 value=1 prim::Constant pnnx_11721 0 1 23698 value=2 torch.matmul torch.matmul_2319 2 1 11714 v.117 11715 $input=11714 $other=v.117 #11714=(36,6,64,64)f32 #v.117=(36,6,64,32)f32 #11715=(36,6,64,32)f32 prim::ListConstruct pnnx_11723 3 1 11664 11668 11676 11717 torch.transpose torch.transpose_3096 3 1 11715 23697 23698 11716 $input=11715 $dim0=23697 $dim1=23698 #11715=(36,6,64,32)f32 
#11716=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_549 2 1 11716 11717 input1.47 $input=11716 $shape=11717 #11716=(36,64,6,32)f32 #input1.47=(36,64,192)f32 nn.Linear layers_dfe.3.residual_group.blocks.3.attn.proj 1 1 input1.47 11719 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.47=(36,64,192)f32 #11719=(36,64,192)f32 nn.Dropout layers_dfe.3.residual_group.blocks.3.attn.proj_drop 1 1 11719 11720 #11719=(36,64,192)f32 #11720=(36,64,192)f32 prim::Constant pnnx_11725 0 1 23699 value=-1 prim::Constant pnnx_11726 0 1 23700 value=8 prim::Constant pnnx_11727 0 1 23701 value=8 prim::ListConstruct pnnx_11728 4 1 23699 23700 23701 11610 11721 prim::Constant pnnx_11730 0 1 23702 value=8 prim::Constant pnnx_11731 0 1 23703 value=trunc aten::div pnnx_11732 3 1 H0.1 23702 23703 11723 aten::Int pnnx_11733 1 1 11723 11724 prim::Constant pnnx_11734 0 1 23704 value=8 prim::Constant pnnx_11735 0 1 23705 value=trunc aten::div pnnx_11736 3 1 W0.1 23704 23705 11725 aten::Int pnnx_11737 1 1 11725 11726 prim::Constant pnnx_11738 0 1 23706 value=1 prim::Constant pnnx_11739 0 1 23707 value=8 prim::Constant pnnx_11740 0 1 23708 value=8 prim::Constant pnnx_11741 0 1 23709 value=-1 prim::ListConstruct pnnx_11742 6 1 23706 11724 11726 23707 23708 23709 11727 prim::Constant pnnx_11744 0 1 23710 value=0 prim::Constant pnnx_11745 0 1 23711 value=1 prim::Constant pnnx_11746 0 1 23712 value=3 prim::Constant pnnx_11747 0 1 23713 value=2 prim::Constant pnnx_11748 0 1 23714 value=4 prim::Constant pnnx_11749 0 1 23715 value=5 prim::ListConstruct pnnx_11750 6 1 23710 23711 23712 23713 23714 23715 11729 Tensor.view Tensor.view_1618 2 1 11720 11721 windows.117 $input=11720 $shape=11721 #11720=(36,64,192)f32 #windows.117=(36,8,8,192)f32 Tensor.view Tensor.view_1619 2 1 windows.117 11727 x9.45 $input=windows.117 $shape=11727 #windows.117=(36,8,8,192)f32 #x9.45=(1,6,6,8,8,192)f32 prim::Constant pnnx_11754 0 1 23717 value=1 prim::Constant pnnx_11755 0 1 23718 value=-1 prim::ListConstruct pnnx_11756 4 1 23717 944 1184 23718 11732 torch.permute torch.permute_2759 2 1 x9.45 11729 11730 $input=x9.45 $dims=11729 #x9.45=(1,6,6,8,8,192)f32 #11730=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_175 1 1 11730 11731 memory_format=torch.contiguous_format $input=11730 #11730=(1,6,8,6,8,192)f32 #11731=(1,6,8,6,8,192)f32 prim::Constant pnnx_11758 0 1 23719 value=4 prim::Constant pnnx_11759 0 1 23720 value=4 prim::ListConstruct pnnx_11760 2 1 23719 23720 11734 prim::Constant pnnx_11761 0 1 23721 value=1 prim::Constant pnnx_11762 0 1 23722 value=2 prim::ListConstruct pnnx_11763 2 1 23721 23722 11735 Tensor.view Tensor.view_1620 2 1 11731 11732 shifted_x.59 $input=11731 $shape=11732 #11731=(1,6,8,6,8,192)f32 #shifted_x.59=(1,48,48,192)f32 aten::mul pnnx_11765 2 1 H0.1 W0.1 11737 aten::Int pnnx_11766 1 1 11737 11738 prim::ListConstruct pnnx_11767 3 1 11605 11738 11609 11739 prim::Constant pnnx_11769 0 1 11741 value=None prim::Constant pnnx_11770 0 1 23723 value=1 torch.roll torch.roll_2477 3 1 shifted_x.59 11734 11735 x10.23 $input=shifted_x.59 $shifts=11734 $dims=11735 #shifted_x.59=(1,48,48,192)f32 #x10.23=(1,48,48,192)f32 Tensor.view Tensor.view_1621 2 1 x10.23 11739 x11.23 $input=x10.23 $shape=11739 #x10.23=(1,48,48,192)f32 #x11.23=(1,2304,192)f32 aten::add pnnx_11771 3 1 11584 x11.23 23723 input.263 #11584=(1,2304,192)f32 #x11.23=(1,2304,192)f32 #input.263=(1,2304,192)f32 nn.LayerNorm layers_dfe.3.residual_group.blocks.3.norm2 1 1 input.263 11743 elementwise_affine=True eps=1.000000e-05 
normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.263=(1,2304,192)f32 #11743=(1,2304,192)f32 nn.Linear layers_dfe.3.residual_group.blocks.3.mlp.fc1 1 1 11743 11748 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #11743=(1,2304,192)f32 #11748=(1,2304,384)f32 nn.GELU layers_dfe.3.residual_group.blocks.3.mlp.act 1 1 11748 11749 #11748=(1,2304,384)f32 #11749=(1,2304,384)f32 nn.Dropout layers_dfe.3.residual_group.blocks.3.mlp.drop 1 1 11749 11750 #11749=(1,2304,384)f32 #11750=(1,2304,384)f32 nn.Linear layers_dfe.3.residual_group.blocks.3.mlp.fc2 1 1 11750 11751 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #11750=(1,2304,384)f32 #11751=(1,2304,192)f32 nn.Dropout layers_dfe.3.residual_group.blocks.3.mlp.drop 1 1 11751 11752 #11751=(1,2304,192)f32 #11752=(1,2304,192)f32 prim::Constant pnnx_11772 0 1 11753 value=None prim::Constant pnnx_11773 0 1 23724 value=1 aten::add pnnx_11774 3 1 input.263 11752 23724 11754 #input.263=(1,2304,192)f32 #11752=(1,2304,192)f32 #11754=(1,2304,192)f32 prim::Constant pnnx_11775 0 1 11755 value=trunc prim::Constant pnnx_11776 0 1 11756 value=8 prim::Constant pnnx_11777 0 1 11757 value=0 prim::Constant pnnx_11778 0 1 11758 value=2 prim::Constant pnnx_11779 0 1 11759 value=1 prim::Constant pnnx_11780 0 1 11760 value=3 prim::Constant pnnx_11781 0 1 11761 value=8 prim::Constant pnnx_11782 0 1 11762 value=4 prim::Constant pnnx_11783 0 1 11763 value=5 prim::Constant pnnx_11784 0 1 11764 value=-1 prim::Constant pnnx_11785 0 1 11765 value=64 aten::size pnnx_11786 2 1 11754 11757 11771 #11754=(1,2304,192)f32 prim::NumToTensor pnnx_11787 1 1 11771 B.141 aten::Int pnnx_11788 1 1 B.141 11773 aten::Int pnnx_11789 1 1 B.141 11774 aten::size pnnx_11790 2 1 11754 11758 11775 #11754=(1,2304,192)f32 prim::NumToTensor pnnx_11791 1 1 11775 C.243 aten::Int pnnx_11792 1 1 C.243 11777 aten::Int pnnx_11793 1 1 C.243 11778 aten::Int pnnx_11794 1 1 C.243 11779 aten::Int pnnx_11795 1 1 C.243 11780 nn.LayerNorm layers_dfe.3.residual_group.blocks.4.norm1 1 1 11754 11781 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #11754=(1,2304,192)f32 #11781=(1,2304,192)f32 prim::ListConstruct pnnx_11796 4 1 11774 941 1181 11780 11782 prim::Constant pnnx_11798 0 1 23725 value=0 Tensor.view Tensor.view_1622 2 1 11781 11782 x.119 $input=11781 $shape=11782 #11781=(1,2304,192)f32 #x.119=(1,48,48,192)f32 aten::size pnnx_11799 2 1 x.119 23725 11784 #x.119=(1,48,48,192)f32 prim::NumToTensor pnnx_11800 1 1 11784 B1.47 aten::Int pnnx_11801 1 1 B1.47 11786 aten::size pnnx_11802 2 1 x.119 11759 11787 #x.119=(1,48,48,192)f32 prim::NumToTensor pnnx_11803 1 1 11787 11788 prim::Constant pnnx_11804 0 1 23726 value=2 aten::size pnnx_11805 2 1 x.119 23726 11789 #x.119=(1,48,48,192)f32 prim::NumToTensor pnnx_11806 1 1 11789 11790 aten::size pnnx_11807 2 1 x.119 11760 11791 #x.119=(1,48,48,192)f32 prim::NumToTensor pnnx_11808 1 1 11791 C1.47 aten::Int pnnx_11809 1 1 C1.47 11793 aten::Int pnnx_11810 1 1 C1.47 11794 aten::div pnnx_11811 3 1 11788 11756 11755 11795 aten::Int pnnx_11812 1 1 11795 11796 prim::Constant pnnx_11813 0 1 23727 value=8 prim::Constant pnnx_11814 0 1 23728 value=trunc aten::div pnnx_11815 3 1 11790 23727 23728 11797 aten::Int pnnx_11816 1 1 11797 11798 prim::Constant pnnx_11817 0 1 23729 value=8 prim::ListConstruct pnnx_11818 6 1 11786 11796 11761 11798 23729 11794 11799 prim::Constant pnnx_11820 0 1 23730 value=0 prim::Constant pnnx_11821 0 1 23731 value=1 prim::Constant pnnx_11822 0 
1 23732 value=3 prim::Constant pnnx_11823 0 1 23733 value=2 prim::ListConstruct pnnx_11824 6 1 23730 23731 23732 23733 11762 11763 11801 Tensor.view Tensor.view_1623 2 1 x.119 11799 x5.61 $input=x.119 $shape=11799 #x.119=(1,48,48,192)f32 #x5.61=(1,6,8,6,8,192)f32 prim::Constant pnnx_11828 0 1 23735 value=8 prim::Constant pnnx_11829 0 1 23736 value=8 prim::ListConstruct pnnx_11830 4 1 11764 23735 23736 11793 11804 torch.permute torch.permute_2760 2 1 x5.61 11801 11802 $input=x5.61 $dims=11801 #x5.61=(1,6,8,6,8,192)f32 #11802=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_176 1 1 11802 11803 memory_format=torch.contiguous_format $input=11802 #11802=(1,6,6,8,8,192)f32 #11803=(1,6,6,8,8,192)f32 prim::Constant pnnx_11832 0 1 23737 value=-1 prim::ListConstruct pnnx_11833 3 1 23737 11765 11779 11806 prim::Constant pnnx_11835 0 1 11808 value=1.767767e-01 prim::Constant pnnx_11836 0 1 11809 value=trunc prim::Constant pnnx_11837 0 1 11810 value=6 prim::Constant pnnx_11838 0 1 11811 value=0 prim::Constant pnnx_11839 0 1 11812 value=1 prim::Constant pnnx_11840 0 1 11813 value=2 prim::Constant pnnx_11841 0 1 11814 value=3 prim::Constant pnnx_11842 0 1 11815 value=6 prim::Constant pnnx_11843 0 1 11816 value=4 prim::Constant pnnx_11844 0 1 11817 value=-2 prim::Constant pnnx_11845 0 1 11818 value=-1 prim::Constant pnnx_11846 0 1 11819 value=64 pnnx.Attribute layers_dfe.3.residual_group.blocks.4.attn 0 1 relative_position_bias_table.119 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.119=(225,6)f32 pnnx.Attribute layers_dfe.3.residual_group.blocks.4.attn 0 1 relative_position_index.119 @relative_position_index=(64,64)i64 #relative_position_index.119=(64,64)i64 Tensor.view Tensor.view_1624 2 1 11803 11804 x_windows.119 $input=11803 $shape=11804 #11803=(1,6,6,8,8,192)f32 #x_windows.119=(36,8,8,192)f32 Tensor.view Tensor.view_1625 2 1 x_windows.119 11806 x6.47 $input=x_windows.119 $shape=11806 #x_windows.119=(36,8,8,192)f32 #x6.47=(36,64,192)f32 aten::size pnnx_11847 2 1 x6.47 11811 11827 #x6.47=(36,64,192)f32 prim::NumToTensor pnnx_11848 1 1 11827 B_.119 aten::Int pnnx_11849 1 1 B_.119 11829 aten::Int pnnx_11850 1 1 B_.119 11830 aten::size pnnx_11851 2 1 x6.47 11812 11831 #x6.47=(36,64,192)f32 prim::NumToTensor pnnx_11852 1 1 11831 N.119 aten::Int pnnx_11853 1 1 N.119 11833 aten::Int pnnx_11854 1 1 N.119 11834 aten::size pnnx_11855 2 1 x6.47 11813 11835 #x6.47=(36,64,192)f32 prim::NumToTensor pnnx_11856 1 1 11835 C.245 aten::Int pnnx_11857 1 1 C.245 11837 nn.Linear layers_dfe.3.residual_group.blocks.4.attn.qkv 1 1 x6.47 11838 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x6.47=(36,64,192)f32 #11838=(36,64,576)f32 aten::div pnnx_11858 3 1 C.245 11810 11809 11839 aten::Int pnnx_11859 1 1 11839 11840 prim::ListConstruct pnnx_11860 5 1 11830 11834 11814 11815 11840 11841 prim::Constant pnnx_11862 0 1 23738 value=2 prim::Constant pnnx_11863 0 1 23739 value=0 prim::Constant pnnx_11864 0 1 23740 value=3 prim::Constant pnnx_11865 0 1 23741 value=1 prim::ListConstruct pnnx_11866 5 1 23738 23739 23740 23741 11816 11843 Tensor.reshape Tensor.reshape_550 2 1 11838 11841 11842 $input=11838 $shape=11841 #11838=(36,64,576)f32 #11842=(36,64,3,6,32)f32 prim::Constant pnnx_11868 0 1 23742 value=0 prim::Constant pnnx_11869 0 1 23743 value=0 prim::Constant pnnx_11871 0 1 23744 value=0 prim::Constant pnnx_11872 0 1 23745 value=1 prim::Constant pnnx_11874 0 1 23746 value=0 prim::Constant pnnx_11875 0 1 23747 value=2 torch.permute torch.permute_2761 2 1 11842 11843 
qkv1.47 $input=11842 $dims=11843 #11842=(36,64,3,6,32)f32 #qkv1.47=(3,36,6,64,32)f32 Tensor.select Tensor.select_824 3 1 qkv1.47 23742 23743 q.119 $input=qkv1.47 $dim=23742 $index=23743 #qkv1.47=(3,36,6,64,32)f32 #q.119=(36,6,64,32)f32 aten::mul pnnx_11877 2 1 q.119 11808 q1.47 #q.119=(36,6,64,32)f32 #q1.47=(36,6,64,32)f32 Tensor.select Tensor.select_825 3 1 qkv1.47 23744 23745 k.119 $input=qkv1.47 $dim=23744 $index=23745 #qkv1.47=(3,36,6,64,32)f32 #k.119=(36,6,64,32)f32 prim::Constant pnnx_11880 0 1 23748 value=-1 prim::ListConstruct pnnx_11881 1 1 23748 11851 Tensor.view Tensor.view_1626 2 1 relative_position_index.119 11851 11852 $input=relative_position_index.119 $shape=11851 #relative_position_index.119=(64,64)i64 #11852=(4096)i64 prim::ListConstruct pnnx_11883 1 1 11852 11853 #11852=(4096)i64 prim::Constant pnnx_11885 0 1 23749 value=64 prim::Constant pnnx_11886 0 1 23750 value=-1 prim::ListConstruct pnnx_11887 3 1 11819 23749 23750 11855 Tensor.index Tensor.index_384 2 1 relative_position_bias_table.119 11853 11854 $input=relative_position_bias_table.119 $expr=11853 #relative_position_bias_table.119=(225,6)f32 #11854=(4096,6)f32 prim::Constant pnnx_11889 0 1 23751 value=2 prim::Constant pnnx_11890 0 1 23752 value=0 prim::Constant pnnx_11891 0 1 23753 value=1 prim::ListConstruct pnnx_11892 3 1 23751 23752 23753 11857 Tensor.view Tensor.view_1627 2 1 11854 11855 relative_position_bias.119 $input=11854 $shape=11855 #11854=(4096,6)f32 #relative_position_bias.119=(64,64,6)f32 prim::Constant pnnx_11896 0 1 23755 value=0 torch.permute torch.permute_2762 2 1 relative_position_bias.119 11857 11858 $input=relative_position_bias.119 $dims=11857 #relative_position_bias.119=(64,64,6)f32 #11858=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_177 1 1 11858 relative_position_bias1.47 memory_format=torch.contiguous_format $input=11858 #11858=(6,64,64)f32 #relative_position_bias1.47=(6,64,64)f32 prim::Constant pnnx_11898 0 1 23756 value=1 torch.transpose torch.transpose_3097 3 1 k.119 11817 11818 11849 $input=k.119 $dim0=11817 $dim1=11818 #k.119=(36,6,64,32)f32 #11849=(36,6,32,64)f32 torch.matmul torch.matmul_2320 2 1 q1.47 11849 attn.239 $input=q1.47 $other=11849 #q1.47=(36,6,64,32)f32 #11849=(36,6,32,64)f32 #attn.239=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3334 2 1 relative_position_bias1.47 23755 11860 $input=relative_position_bias1.47 $dim=23755 #relative_position_bias1.47=(6,64,64)f32 #11860=(1,6,64,64)f32 aten::add pnnx_11899 3 1 attn.239 11860 23756 input.265 #attn.239=(36,6,64,64)f32 #11860=(1,6,64,64)f32 #input.265=(36,6,64,64)f32 nn.Softmax layers_dfe.3.residual_group.blocks.4.attn.softmax 1 1 input.265 11862 dim=-1 #input.265=(36,6,64,64)f32 #11862=(36,6,64,64)f32 nn.Dropout layers_dfe.3.residual_group.blocks.4.attn.attn_drop 1 1 11862 11863 #11862=(36,6,64,64)f32 #11863=(36,6,64,64)f32 Tensor.select Tensor.select_826 3 1 qkv1.47 23746 23747 v.119 $input=qkv1.47 $dim=23746 $index=23747 #qkv1.47=(3,36,6,64,32)f32 #v.119=(36,6,64,32)f32 prim::Constant pnnx_11901 0 1 23757 value=1 prim::Constant pnnx_11902 0 1 23758 value=2 torch.matmul torch.matmul_2321 2 1 11863 v.119 11864 $input=11863 $other=v.119 #11863=(36,6,64,64)f32 #v.119=(36,6,64,32)f32 #11864=(36,6,64,32)f32 prim::ListConstruct pnnx_11904 3 1 11829 11833 11837 11866 torch.transpose torch.transpose_3098 3 1 11864 23757 23758 11865 $input=11864 $dim0=23757 $dim1=23758 #11864=(36,6,64,32)f32 #11865=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_551 2 1 11865 11866 input1.49 $input=11865 $shape=11866 #11865=(36,64,6,32)f32 
#input1.49=(36,64,192)f32 nn.Linear layers_dfe.3.residual_group.blocks.4.attn.proj 1 1 input1.49 11868 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.49=(36,64,192)f32 #11868=(36,64,192)f32 nn.Dropout layers_dfe.3.residual_group.blocks.4.attn.proj_drop 1 1 11868 11869 #11868=(36,64,192)f32 #11869=(36,64,192)f32 prim::Constant pnnx_11906 0 1 23759 value=-1 prim::Constant pnnx_11907 0 1 23760 value=8 prim::Constant pnnx_11908 0 1 23761 value=8 prim::ListConstruct pnnx_11909 4 1 23759 23760 23761 11778 11870 prim::Constant pnnx_11911 0 1 23762 value=8 prim::Constant pnnx_11912 0 1 23763 value=trunc aten::div pnnx_11913 3 1 H0.1 23762 23763 11872 aten::Int pnnx_11914 1 1 11872 11873 prim::Constant pnnx_11915 0 1 23764 value=8 prim::Constant pnnx_11916 0 1 23765 value=trunc aten::div pnnx_11917 3 1 W0.1 23764 23765 11874 aten::Int pnnx_11918 1 1 11874 11875 prim::Constant pnnx_11919 0 1 23766 value=1 prim::Constant pnnx_11920 0 1 23767 value=8 prim::Constant pnnx_11921 0 1 23768 value=8 prim::Constant pnnx_11922 0 1 23769 value=-1 prim::ListConstruct pnnx_11923 6 1 23766 11873 11875 23767 23768 23769 11876 prim::Constant pnnx_11925 0 1 23770 value=0 prim::Constant pnnx_11926 0 1 23771 value=1 prim::Constant pnnx_11927 0 1 23772 value=3 prim::Constant pnnx_11928 0 1 23773 value=2 prim::Constant pnnx_11929 0 1 23774 value=4 prim::Constant pnnx_11930 0 1 23775 value=5 prim::ListConstruct pnnx_11931 6 1 23770 23771 23772 23773 23774 23775 11878 Tensor.view Tensor.view_1628 2 1 11869 11870 windows.119 $input=11869 $shape=11870 #11869=(36,64,192)f32 #windows.119=(36,8,8,192)f32 Tensor.view Tensor.view_1629 2 1 windows.119 11876 x7.47 $input=windows.119 $shape=11876 #windows.119=(36,8,8,192)f32 #x7.47=(1,6,6,8,8,192)f32 prim::Constant pnnx_11935 0 1 23777 value=1 prim::Constant pnnx_11936 0 1 23778 value=-1 prim::ListConstruct pnnx_11937 4 1 23777 938 1178 23778 11881 torch.permute torch.permute_2763 2 1 x7.47 11878 11879 $input=x7.47 $dims=11878 #x7.47=(1,6,6,8,8,192)f32 #11879=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_178 1 1 11879 11880 memory_format=torch.contiguous_format $input=11879 #11879=(1,6,8,6,8,192)f32 #11880=(1,6,8,6,8,192)f32 aten::mul pnnx_11939 2 1 H0.1 W0.1 11883 aten::Int pnnx_11940 1 1 11883 11884 prim::ListConstruct pnnx_11941 3 1 11773 11884 11777 11885 prim::Constant pnnx_11943 0 1 11887 value=None prim::Constant pnnx_11944 0 1 23779 value=1 Tensor.view Tensor.view_1630 2 1 11880 11881 x8.47 $input=11880 $shape=11881 #11880=(1,6,8,6,8,192)f32 #x8.47=(1,48,48,192)f32 Tensor.view Tensor.view_1631 2 1 x8.47 11885 x9.47 $input=x8.47 $shape=11885 #x8.47=(1,48,48,192)f32 #x9.47=(1,2304,192)f32 aten::add pnnx_11945 3 1 11754 x9.47 23779 input.267 #11754=(1,2304,192)f32 #x9.47=(1,2304,192)f32 #input.267=(1,2304,192)f32 nn.LayerNorm layers_dfe.3.residual_group.blocks.4.norm2 1 1 input.267 11889 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.267=(1,2304,192)f32 #11889=(1,2304,192)f32 nn.Linear layers_dfe.3.residual_group.blocks.4.mlp.fc1 1 1 11889 11894 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #11889=(1,2304,192)f32 #11894=(1,2304,384)f32 nn.GELU layers_dfe.3.residual_group.blocks.4.mlp.act 1 1 11894 11895 #11894=(1,2304,384)f32 #11895=(1,2304,384)f32 nn.Dropout layers_dfe.3.residual_group.blocks.4.mlp.drop 1 1 11895 11896 #11895=(1,2304,384)f32 #11896=(1,2304,384)f32 nn.Linear layers_dfe.3.residual_group.blocks.4.mlp.fc2 1 1 11896 11897 
bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #11896=(1,2304,384)f32 #11897=(1,2304,192)f32 nn.Dropout layers_dfe.3.residual_group.blocks.4.mlp.drop 1 1 11897 11898 #11897=(1,2304,192)f32 #11898=(1,2304,192)f32 prim::Constant pnnx_11946 0 1 11899 value=None prim::Constant pnnx_11947 0 1 23780 value=1 aten::add pnnx_11948 3 1 input.267 11898 23780 11900 #input.267=(1,2304,192)f32 #11898=(1,2304,192)f32 #11900=(1,2304,192)f32 prim::Constant pnnx_11949 0 1 11901 value=trunc prim::Constant pnnx_11950 0 1 11902 value=8 prim::Constant pnnx_11951 0 1 11903 value=0 prim::Constant pnnx_11952 0 1 11904 value=2 prim::Constant pnnx_11953 0 1 11905 value=-4 prim::Constant pnnx_11954 0 1 11906 value=1 prim::Constant pnnx_11955 0 1 11907 value=3 prim::Constant pnnx_11956 0 1 11908 value=8 prim::Constant pnnx_11957 0 1 11909 value=4 prim::Constant pnnx_11958 0 1 11910 value=5 prim::Constant pnnx_11959 0 1 11911 value=-1 prim::Constant pnnx_11960 0 1 11912 value=64 pnnx.Attribute layers_dfe.3.residual_group.blocks.5 0 1 attn_mask.61 @attn_mask=(36,64,64)f32 #attn_mask.61=(36,64,64)f32 aten::size pnnx_11961 2 1 11900 11903 11919 #11900=(1,2304,192)f32 prim::NumToTensor pnnx_11962 1 1 11919 B.143 aten::Int pnnx_11963 1 1 B.143 11921 aten::Int pnnx_11964 1 1 B.143 11922 aten::size pnnx_11965 2 1 11900 11904 11923 #11900=(1,2304,192)f32 prim::NumToTensor pnnx_11966 1 1 11923 C.247 aten::Int pnnx_11967 1 1 C.247 11925 aten::Int pnnx_11968 1 1 C.247 11926 aten::Int pnnx_11969 1 1 C.247 11927 aten::Int pnnx_11970 1 1 C.247 11928 nn.LayerNorm layers_dfe.3.residual_group.blocks.5.norm1 1 1 11900 11929 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #11900=(1,2304,192)f32 #11929=(1,2304,192)f32 prim::ListConstruct pnnx_11971 4 1 11922 935 1175 11928 11930 prim::Constant pnnx_11973 0 1 23781 value=-4 prim::ListConstruct pnnx_11974 2 1 11905 23781 11932 prim::Constant pnnx_11975 0 1 23782 value=2 prim::ListConstruct pnnx_11976 2 1 11906 23782 11933 Tensor.view Tensor.view_1632 2 1 11929 11930 x.121 $input=11929 $shape=11930 #11929=(1,2304,192)f32 #x.121=(1,48,48,192)f32 prim::Constant pnnx_11978 0 1 23783 value=0 torch.roll torch.roll_2478 3 1 x.121 11932 11933 x6.49 $input=x.121 $shifts=11932 $dims=11933 #x.121=(1,48,48,192)f32 #x6.49=(1,48,48,192)f32 aten::size pnnx_11979 2 1 x6.49 23783 11935 #x6.49=(1,48,48,192)f32 prim::NumToTensor pnnx_11980 1 1 11935 B1.49 aten::Int pnnx_11981 1 1 B1.49 11937 prim::Constant pnnx_11982 0 1 23784 value=1 aten::size pnnx_11983 2 1 x6.49 23784 11938 #x6.49=(1,48,48,192)f32 prim::NumToTensor pnnx_11984 1 1 11938 11939 prim::Constant pnnx_11985 0 1 23785 value=2 aten::size pnnx_11986 2 1 x6.49 23785 11940 #x6.49=(1,48,48,192)f32 prim::NumToTensor pnnx_11987 1 1 11940 11941 aten::size pnnx_11988 2 1 x6.49 11907 11942 #x6.49=(1,48,48,192)f32 prim::NumToTensor pnnx_11989 1 1 11942 C1.49 aten::Int pnnx_11990 1 1 C1.49 11944 aten::Int pnnx_11991 1 1 C1.49 11945 aten::div pnnx_11992 3 1 11939 11902 11901 11946 aten::Int pnnx_11993 1 1 11946 11947 prim::Constant pnnx_11994 0 1 23786 value=8 prim::Constant pnnx_11995 0 1 23787 value=trunc aten::div pnnx_11996 3 1 11941 23786 23787 11948 aten::Int pnnx_11997 1 1 11948 11949 prim::Constant pnnx_11998 0 1 23788 value=8 prim::ListConstruct pnnx_11999 6 1 11937 11947 11908 11949 23788 11945 11950 prim::Constant pnnx_12001 0 1 23789 value=0 prim::Constant pnnx_12002 0 1 23790 value=1 prim::Constant pnnx_12003 0 1 23791 value=3 prim::Constant pnnx_12004 0 1 23792 
value=2 prim::ListConstruct pnnx_12005 6 1 23789 23790 23791 23792 11909 11910 11952 Tensor.view Tensor.view_1633 2 1 x6.49 11950 x7.49 $input=x6.49 $shape=11950 #x6.49=(1,48,48,192)f32 #x7.49=(1,6,8,6,8,192)f32 prim::Constant pnnx_12009 0 1 23794 value=8 prim::Constant pnnx_12010 0 1 23795 value=8 prim::ListConstruct pnnx_12011 4 1 11911 23794 23795 11944 11955 torch.permute torch.permute_2764 2 1 x7.49 11952 11953 $input=x7.49 $dims=11952 #x7.49=(1,6,8,6,8,192)f32 #11953=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_179 1 1 11953 11954 memory_format=torch.contiguous_format $input=11953 #11953=(1,6,6,8,8,192)f32 #11954=(1,6,6,8,8,192)f32 prim::Constant pnnx_12013 0 1 23796 value=-1 prim::ListConstruct pnnx_12014 3 1 23796 11912 11927 11957 prim::Constant pnnx_12016 0 1 11959 value=1.767767e-01 prim::Constant pnnx_12017 0 1 11960 value=trunc prim::Constant pnnx_12018 0 1 11961 value=6 prim::Constant pnnx_12019 0 1 11962 value=0 prim::Constant pnnx_12020 0 1 11963 value=1 prim::Constant pnnx_12021 0 1 11964 value=2 prim::Constant pnnx_12022 0 1 11965 value=3 prim::Constant pnnx_12023 0 1 11966 value=6 prim::Constant pnnx_12024 0 1 11967 value=4 prim::Constant pnnx_12025 0 1 11968 value=-2 prim::Constant pnnx_12026 0 1 11969 value=-1 prim::Constant pnnx_12027 0 1 11970 value=64 pnnx.Attribute layers_dfe.3.residual_group.blocks.5.attn 0 1 relative_position_bias_table.121 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.121=(225,6)f32 pnnx.Attribute layers_dfe.3.residual_group.blocks.5.attn 0 1 relative_position_index.121 @relative_position_index=(64,64)i64 #relative_position_index.121=(64,64)i64 Tensor.view Tensor.view_1634 2 1 11954 11955 x_windows.121 $input=11954 $shape=11955 #11954=(1,6,6,8,8,192)f32 #x_windows.121=(36,8,8,192)f32 Tensor.view Tensor.view_1635 2 1 x_windows.121 11957 x8.49 $input=x_windows.121 $shape=11957 #x_windows.121=(36,8,8,192)f32 #x8.49=(36,64,192)f32 aten::size pnnx_12028 2 1 x8.49 11962 11978 #x8.49=(36,64,192)f32 prim::NumToTensor pnnx_12029 1 1 11978 B_.121 aten::Int pnnx_12030 1 1 B_.121 11980 aten::Int pnnx_12031 1 1 B_.121 11981 aten::size pnnx_12032 2 1 x8.49 11963 11982 #x8.49=(36,64,192)f32 prim::NumToTensor pnnx_12033 1 1 11982 N.121 aten::Int pnnx_12034 1 1 N.121 11984 aten::Int pnnx_12035 1 1 N.121 11985 aten::Int pnnx_12036 1 1 N.121 11986 aten::Int pnnx_12037 1 1 N.121 11987 aten::Int pnnx_12038 1 1 N.121 11988 aten::Int pnnx_12039 1 1 N.121 11989 aten::size pnnx_12040 2 1 x8.49 11964 11990 #x8.49=(36,64,192)f32 prim::NumToTensor pnnx_12041 1 1 11990 C.249 aten::Int pnnx_12042 1 1 C.249 11992 nn.Linear layers_dfe.3.residual_group.blocks.5.attn.qkv 1 1 x8.49 11993 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x8.49=(36,64,192)f32 #11993=(36,64,576)f32 aten::div pnnx_12043 3 1 C.249 11961 11960 11994 aten::Int pnnx_12044 1 1 11994 11995 prim::ListConstruct pnnx_12045 5 1 11981 11989 11965 11966 11995 11996 prim::Constant pnnx_12047 0 1 23797 value=2 prim::Constant pnnx_12048 0 1 23798 value=0 prim::Constant pnnx_12049 0 1 23799 value=3 prim::Constant pnnx_12050 0 1 23800 value=1 prim::ListConstruct pnnx_12051 5 1 23797 23798 23799 23800 11967 11998 Tensor.reshape Tensor.reshape_552 2 1 11993 11996 11997 $input=11993 $shape=11996 #11993=(36,64,576)f32 #11997=(36,64,3,6,32)f32 prim::Constant pnnx_12053 0 1 23801 value=0 prim::Constant pnnx_12054 0 1 23802 value=0 prim::Constant pnnx_12056 0 1 23803 value=0 prim::Constant pnnx_12057 0 1 23804 value=1 prim::Constant pnnx_12059 0 1 23805 
value=0 prim::Constant pnnx_12060 0 1 23806 value=2 torch.permute torch.permute_2765 2 1 11997 11998 qkv1.49 $input=11997 $dims=11998 #11997=(36,64,3,6,32)f32 #qkv1.49=(3,36,6,64,32)f32 Tensor.select Tensor.select_827 3 1 qkv1.49 23801 23802 q.121 $input=qkv1.49 $dim=23801 $index=23802 #qkv1.49=(3,36,6,64,32)f32 #q.121=(36,6,64,32)f32 aten::mul pnnx_12062 2 1 q.121 11959 q1.49 #q.121=(36,6,64,32)f32 #q1.49=(36,6,64,32)f32 Tensor.select Tensor.select_828 3 1 qkv1.49 23803 23804 k.121 $input=qkv1.49 $dim=23803 $index=23804 #qkv1.49=(3,36,6,64,32)f32 #k.121=(36,6,64,32)f32 prim::Constant pnnx_12065 0 1 23807 value=-1 prim::ListConstruct pnnx_12066 1 1 23807 12006 Tensor.view Tensor.view_1636 2 1 relative_position_index.121 12006 12007 $input=relative_position_index.121 $shape=12006 #relative_position_index.121=(64,64)i64 #12007=(4096)i64 prim::ListConstruct pnnx_12068 1 1 12007 12008 #12007=(4096)i64 prim::Constant pnnx_12070 0 1 23808 value=64 prim::Constant pnnx_12071 0 1 23809 value=-1 prim::ListConstruct pnnx_12072 3 1 11970 23808 23809 12010 Tensor.index Tensor.index_385 2 1 relative_position_bias_table.121 12008 12009 $input=relative_position_bias_table.121 $expr=12008 #relative_position_bias_table.121=(225,6)f32 #12009=(4096,6)f32 prim::Constant pnnx_12074 0 1 23810 value=2 prim::Constant pnnx_12075 0 1 23811 value=0 prim::Constant pnnx_12076 0 1 23812 value=1 prim::ListConstruct pnnx_12077 3 1 23810 23811 23812 12012 Tensor.view Tensor.view_1637 2 1 12009 12010 relative_position_bias.121 $input=12009 $shape=12010 #12009=(4096,6)f32 #relative_position_bias.121=(64,64,6)f32 prim::Constant pnnx_12081 0 1 23814 value=0 torch.permute torch.permute_2766 2 1 relative_position_bias.121 12012 12013 $input=relative_position_bias.121 $dims=12012 #relative_position_bias.121=(64,64,6)f32 #12013=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_180 1 1 12013 relative_position_bias1.49 memory_format=torch.contiguous_format $input=12013 #12013=(6,64,64)f32 #relative_position_bias1.49=(6,64,64)f32 prim::Constant pnnx_12083 0 1 23815 value=1 torch.transpose torch.transpose_3099 3 1 k.121 11968 11969 12004 $input=k.121 $dim0=11968 $dim1=11969 #k.121=(36,6,64,32)f32 #12004=(36,6,32,64)f32 torch.matmul torch.matmul_2322 2 1 q1.49 12004 attn.243 $input=q1.49 $other=12004 #q1.49=(36,6,64,32)f32 #12004=(36,6,32,64)f32 #attn.243=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3335 2 1 relative_position_bias1.49 23814 12015 $input=relative_position_bias1.49 $dim=23814 #relative_position_bias1.49=(6,64,64)f32 #12015=(1,6,64,64)f32 aten::add pnnx_12084 3 1 attn.243 12015 23815 attn2.25 #attn.243=(36,6,64,64)f32 #12015=(1,6,64,64)f32 #attn2.25=(36,6,64,64)f32 prim::Constant pnnx_12085 0 1 23816 value=0 aten::size pnnx_12086 2 1 attn_mask.61 23816 12017 #attn_mask.61=(36,64,64)f32 prim::NumToTensor pnnx_12087 1 1 12017 other.61 aten::Int pnnx_12088 1 1 other.61 12019 prim::Constant pnnx_12089 0 1 23817 value=trunc aten::div pnnx_12090 3 1 B_.121 other.61 23817 12020 aten::Int pnnx_12091 1 1 12020 12021 prim::Constant pnnx_12092 0 1 23818 value=6 prim::ListConstruct pnnx_12093 5 1 12021 12019 23818 11988 11987 12022 prim::Constant pnnx_12095 0 1 23819 value=1 prim::Constant pnnx_12097 0 1 23820 value=0 prim::Constant pnnx_12099 0 1 23821 value=1 Tensor.view Tensor.view_1638 2 1 attn2.25 12022 12023 $input=attn2.25 $shape=12022 #attn2.25=(36,6,64,64)f32 #12023=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3336 2 1 attn_mask.61 23819 12024 $input=attn_mask.61 $dim=23819 #attn_mask.61=(36,64,64)f32 
#12024=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3337 2 1 12024 23820 12025 $input=12024 $dim=23820 #12024=(36,1,64,64)f32 #12025=(1,36,1,64,64)f32 aten::add pnnx_12100 3 1 12023 12025 23821 attn3.25 #12023=(1,36,6,64,64)f32 #12025=(1,36,1,64,64)f32 #attn3.25=(1,36,6,64,64)f32 prim::Constant pnnx_12101 0 1 23822 value=-1 prim::Constant pnnx_12102 0 1 23823 value=6 prim::ListConstruct pnnx_12103 4 1 23822 23823 11986 11985 12027 Tensor.view Tensor.view_1639 2 1 attn3.25 12027 input.269 $input=attn3.25 $shape=12027 #attn3.25=(1,36,6,64,64)f32 #input.269=(36,6,64,64)f32 nn.Softmax layers_dfe.3.residual_group.blocks.5.attn.softmax 1 1 input.269 12029 dim=-1 #input.269=(36,6,64,64)f32 #12029=(36,6,64,64)f32 nn.Dropout layers_dfe.3.residual_group.blocks.5.attn.attn_drop 1 1 12029 12030 #12029=(36,6,64,64)f32 #12030=(36,6,64,64)f32 Tensor.select Tensor.select_829 3 1 qkv1.49 23805 23806 v.121 $input=qkv1.49 $dim=23805 $index=23806 #qkv1.49=(3,36,6,64,32)f32 #v.121=(36,6,64,32)f32 prim::Constant pnnx_12106 0 1 23824 value=1 prim::Constant pnnx_12107 0 1 23825 value=2 torch.matmul torch.matmul_2323 2 1 12030 v.121 12031 $input=12030 $other=v.121 #12030=(36,6,64,64)f32 #v.121=(36,6,64,32)f32 #12031=(36,6,64,32)f32 prim::ListConstruct pnnx_12109 3 1 11980 11984 11992 12033 torch.transpose torch.transpose_3100 3 1 12031 23824 23825 12032 $input=12031 $dim0=23824 $dim1=23825 #12031=(36,6,64,32)f32 #12032=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_553 2 1 12032 12033 input1.51 $input=12032 $shape=12033 #12032=(36,64,6,32)f32 #input1.51=(36,64,192)f32 nn.Linear layers_dfe.3.residual_group.blocks.5.attn.proj 1 1 input1.51 12035 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.51=(36,64,192)f32 #12035=(36,64,192)f32 nn.Dropout layers_dfe.3.residual_group.blocks.5.attn.proj_drop 1 1 12035 12036 #12035=(36,64,192)f32 #12036=(36,64,192)f32 prim::Constant pnnx_12111 0 1 23826 value=-1 prim::Constant pnnx_12112 0 1 23827 value=8 prim::Constant pnnx_12113 0 1 23828 value=8 prim::ListConstruct pnnx_12114 4 1 23826 23827 23828 11926 12037 prim::Constant pnnx_12116 0 1 23829 value=8 prim::Constant pnnx_12117 0 1 23830 value=trunc aten::div pnnx_12118 3 1 H0.1 23829 23830 12039 aten::Int pnnx_12119 1 1 12039 12040 prim::Constant pnnx_12120 0 1 23831 value=8 prim::Constant pnnx_12121 0 1 23832 value=trunc aten::div pnnx_12122 3 1 W0.1 23831 23832 12041 aten::Int pnnx_12123 1 1 12041 12042 prim::Constant pnnx_12124 0 1 23833 value=1 prim::Constant pnnx_12125 0 1 23834 value=8 prim::Constant pnnx_12126 0 1 23835 value=8 prim::Constant pnnx_12127 0 1 23836 value=-1 prim::ListConstruct pnnx_12128 6 1 23833 12040 12042 23834 23835 23836 12043 prim::Constant pnnx_12130 0 1 23837 value=0 prim::Constant pnnx_12131 0 1 23838 value=1 prim::Constant pnnx_12132 0 1 23839 value=3 prim::Constant pnnx_12133 0 1 23840 value=2 prim::Constant pnnx_12134 0 1 23841 value=4 prim::Constant pnnx_12135 0 1 23842 value=5 prim::ListConstruct pnnx_12136 6 1 23837 23838 23839 23840 23841 23842 12045 Tensor.view Tensor.view_1640 2 1 12036 12037 windows.121 $input=12036 $shape=12037 #12036=(36,64,192)f32 #windows.121=(36,8,8,192)f32 Tensor.view Tensor.view_1641 2 1 windows.121 12043 x9.49 $input=windows.121 $shape=12043 #windows.121=(36,8,8,192)f32 #x9.49=(1,6,6,8,8,192)f32 prim::Constant pnnx_12140 0 1 23844 value=1 prim::Constant pnnx_12141 0 1 23845 value=-1 prim::ListConstruct pnnx_12142 4 1 23844 932 1172 23845 12048 torch.permute torch.permute_2767 2 1 x9.49 12045 12046 $input=x9.49 $dims=12045 
#x9.49=(1,6,6,8,8,192)f32 #12046=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_181 1 1 12046 12047 memory_format=torch.contiguous_format $input=12046 #12046=(1,6,8,6,8,192)f32 #12047=(1,6,8,6,8,192)f32 prim::Constant pnnx_12144 0 1 23846 value=4 prim::Constant pnnx_12145 0 1 23847 value=4 prim::ListConstruct pnnx_12146 2 1 23846 23847 12050 prim::Constant pnnx_12147 0 1 23848 value=1 prim::Constant pnnx_12148 0 1 23849 value=2 prim::ListConstruct pnnx_12149 2 1 23848 23849 12051 Tensor.view Tensor.view_1642 2 1 12047 12048 shifted_x.61 $input=12047 $shape=12048 #12047=(1,6,8,6,8,192)f32 #shifted_x.61=(1,48,48,192)f32 aten::mul pnnx_12151 2 1 H0.1 W0.1 12053 aten::Int pnnx_12152 1 1 12053 12054 prim::ListConstruct pnnx_12153 3 1 11921 12054 11925 12055 prim::Constant pnnx_12155 0 1 12057 value=None prim::Constant pnnx_12156 0 1 23850 value=1 torch.roll torch.roll_2479 3 1 shifted_x.61 12050 12051 x10.25 $input=shifted_x.61 $shifts=12050 $dims=12051 #shifted_x.61=(1,48,48,192)f32 #x10.25=(1,48,48,192)f32 Tensor.view Tensor.view_1643 2 1 x10.25 12055 x11.25 $input=x10.25 $shape=12055 #x10.25=(1,48,48,192)f32 #x11.25=(1,2304,192)f32 aten::add pnnx_12157 3 1 11900 x11.25 23850 input.271 #11900=(1,2304,192)f32 #x11.25=(1,2304,192)f32 #input.271=(1,2304,192)f32 nn.LayerNorm layers_dfe.3.residual_group.blocks.5.norm2 1 1 input.271 12059 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.271=(1,2304,192)f32 #12059=(1,2304,192)f32 nn.Linear layers_dfe.3.residual_group.blocks.5.mlp.fc1 1 1 12059 12064 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #12059=(1,2304,192)f32 #12064=(1,2304,384)f32 nn.GELU layers_dfe.3.residual_group.blocks.5.mlp.act 1 1 12064 12065 #12064=(1,2304,384)f32 #12065=(1,2304,384)f32 nn.Dropout layers_dfe.3.residual_group.blocks.5.mlp.drop 1 1 12065 12066 #12065=(1,2304,384)f32 #12066=(1,2304,384)f32 nn.Linear layers_dfe.3.residual_group.blocks.5.mlp.fc2 1 1 12066 12067 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #12066=(1,2304,384)f32 #12067=(1,2304,192)f32 nn.Dropout layers_dfe.3.residual_group.blocks.5.mlp.drop 1 1 12067 12068 #12067=(1,2304,192)f32 #12068=(1,2304,192)f32 prim::Constant pnnx_12158 0 1 12069 value=None prim::Constant pnnx_12159 0 1 23851 value=1 aten::add pnnx_12160 3 1 input.271 12068 23851 12070 #input.271=(1,2304,192)f32 #12068=(1,2304,192)f32 #12070=(1,2304,192)f32 prim::Constant pnnx_12161 0 1 12071 value=0 prim::Constant pnnx_12162 0 1 12072 value=1 prim::Constant pnnx_12163 0 1 12073 value=2 prim::Constant pnnx_12164 0 1 12074 value=192 aten::size pnnx_12165 2 1 12070 12071 12075 #12070=(1,2304,192)f32 prim::NumToTensor pnnx_12166 1 1 12075 B.145 aten::Int pnnx_12167 1 1 B.145 12077 prim::ListConstruct pnnx_12169 4 1 12077 12074 929 1169 12079 torch.transpose torch.transpose_3101 3 1 12070 12072 12073 12078 $input=12070 $dim0=12072 $dim1=12073 #12070=(1,2304,192)f32 #12078=(1,192,2304)f32 Tensor.view Tensor.view_1644 2 1 12078 12079 input.273 $input=12078 $shape=12079 #12078=(1,192,2304)f32 #input.273=(1,192,48,48)f32 nn.Conv2d layers_dfe.3.conv 1 1 input.273 12081 bias=True dilation=(1,1) groups=1 in_channels=192 kernel_size=(3,3) out_channels=192 padding=(1,1) padding_mode=zeros stride=(1,1) @bias=(192)f32 @weight=(192,192,3,3)f32 #input.273=(1,192,48,48)f32 #12081=(1,192,48,48)f32 prim::Constant pnnx_12171 0 1 12082 value=-1 prim::Constant pnnx_12172 0 1 12083 value=2 prim::Constant pnnx_12173 0 1 12084 value=1 
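Blocks 3 and 5 of layers_dfe.3 above differ from the even-numbered blocks only in the torch.roll pair (shifts of -4 on dims (1,2) before the window partition, +4 after the window reverse) and in the attn_mask.* attribute added to the attention logits. The following is a compact sketch of that block flow under the same assumptions (H = W = 48, window_size = 8, shift = 4, dim = 192); the helper and argument names are hypothetical, and the nn.Dropout branches visible in the trace are omitted.

import torch
import torch.nn as nn

def window_partition(x, ws=8):
    # (B, H, W, C) -> (B*H/ws*W/ws, ws*ws, C); condenses the recurring
    # view -> permute([0,1,3,2,4,5]) -> contiguous -> view runs in the trace
    B, H, W, C = x.shape
    x = x.view(B, H // ws, ws, W // ws, ws, C)
    x = x.permute(0, 1, 3, 2, 4, 5).contiguous()
    return x.view(-1, ws * ws, C)

def window_reverse(windows, ws, H, W):
    # inverse of window_partition: (nW*B, ws*ws, C) -> (B, H, W, C)
    B = windows.shape[0] // ((H // ws) * (W // ws))
    x = windows.view(B, H // ws, W // ws, ws, ws, -1)
    x = x.permute(0, 1, 3, 2, 4, 5).contiguous()
    return x.view(B, H, W, -1)

def shifted_block(x, H, W, norm1, attn, norm2, mlp, attn_mask=None, shift=4, ws=8):
    # One block of the residual group: (shifted) window attention + MLP,
    # each with a residual connection, as traced above.
    B, L, C = x.shape                          # (1, 2304, 192) in the trace
    shortcut = x
    x = norm1(x).view(B, H, W, C)
    if shift:
        x = torch.roll(x, shifts=(-shift, -shift), dims=(1, 2))   # torch.roll_24xx
    windows = window_partition(x, ws)                             # (36, 64, 192)
    windows = attn(windows, mask=attn_mask)                       # (36, 64, 192)
    x = window_reverse(windows, ws, H, W)
    if shift:
        x = torch.roll(x, shifts=(shift, shift), dims=(1, 2))
    x = shortcut + x.view(B, H * W, C)
    return x + mlp(norm2(x))                   # fc1 -> GELU -> fc2 branch

Here mlp would be nn.Sequential(nn.Linear(192, 384), nn.GELU(), nn.Linear(384, 192)) and norm1/norm2 nn.LayerNorm(192), mirroring the nn.Linear/nn.GELU/nn.LayerNorm entries of each block; attn can be any callable with the signature of the window-attention sketch given earlier, and shift=0 reproduces the even-numbered blocks.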
prim::Constant pnnx_12175 0 1 23852 value=2 torch.flatten torch.flatten_2194 3 1 12081 12083 12082 12085 $input=12081 $start_dim=12083 $end_dim=12082 #12081=(1,192,48,48)f32 #12085=(1,192,2304)f32 torch.transpose torch.transpose_3102 3 1 12085 12084 23852 12086 $input=12085 $dim0=12084 $dim1=23852 #12085=(1,192,2304)f32 #12086=(1,2304,192)f32 aten::add pnnx_12177 3 1 12086 11105 11106 12087 #12086=(1,2304,192)f32 #11105=(1,2304,192)f32 #12087=(1,2304,192)f32 prim::Constant pnnx_12178 0 1 12088 value=1 prim::Constant pnnx_12179 0 1 12105 value=trunc prim::Constant pnnx_12180 0 1 12106 value=8 prim::Constant pnnx_12181 0 1 12107 value=0 prim::Constant pnnx_12182 0 1 12108 value=2 prim::Constant pnnx_12183 0 1 12109 value=1 prim::Constant pnnx_12184 0 1 12110 value=3 prim::Constant pnnx_12185 0 1 12111 value=8 prim::Constant pnnx_12186 0 1 12112 value=4 prim::Constant pnnx_12187 0 1 12113 value=5 prim::Constant pnnx_12188 0 1 12114 value=-1 prim::Constant pnnx_12189 0 1 12115 value=64 aten::size pnnx_12190 2 1 12087 12107 12121 #12087=(1,2304,192)f32 prim::NumToTensor pnnx_12191 1 1 12121 B.147 aten::Int pnnx_12192 1 1 B.147 12123 aten::Int pnnx_12193 1 1 B.147 12124 aten::size pnnx_12194 2 1 12087 12108 12125 #12087=(1,2304,192)f32 prim::NumToTensor pnnx_12195 1 1 12125 C.251 aten::Int pnnx_12196 1 1 C.251 12127 aten::Int pnnx_12197 1 1 C.251 12128 aten::Int pnnx_12198 1 1 C.251 12129 aten::Int pnnx_12199 1 1 C.251 12130 nn.LayerNorm layers_dfe.4.residual_group.blocks.0.norm1 1 1 12087 12131 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #12087=(1,2304,192)f32 #12131=(1,2304,192)f32 prim::ListConstruct pnnx_12200 4 1 12124 926 1166 12130 12132 prim::Constant pnnx_12202 0 1 23853 value=0 Tensor.view Tensor.view_1645 2 1 12131 12132 x.123 $input=12131 $shape=12132 #12131=(1,2304,192)f32 #x.123=(1,48,48,192)f32 aten::size pnnx_12203 2 1 x.123 23853 12134 #x.123=(1,48,48,192)f32 prim::NumToTensor pnnx_12204 1 1 12134 B1.51 aten::Int pnnx_12205 1 1 B1.51 12136 aten::size pnnx_12206 2 1 x.123 12109 12137 #x.123=(1,48,48,192)f32 prim::NumToTensor pnnx_12207 1 1 12137 12138 prim::Constant pnnx_12208 0 1 23854 value=2 aten::size pnnx_12209 2 1 x.123 23854 12139 #x.123=(1,48,48,192)f32 prim::NumToTensor pnnx_12210 1 1 12139 12140 aten::size pnnx_12211 2 1 x.123 12110 12141 #x.123=(1,48,48,192)f32 prim::NumToTensor pnnx_12212 1 1 12141 C1.51 aten::Int pnnx_12213 1 1 C1.51 12143 aten::Int pnnx_12214 1 1 C1.51 12144 aten::div pnnx_12215 3 1 12138 12106 12105 12145 aten::Int pnnx_12216 1 1 12145 12146 prim::Constant pnnx_12217 0 1 23855 value=8 prim::Constant pnnx_12218 0 1 23856 value=trunc aten::div pnnx_12219 3 1 12140 23855 23856 12147 aten::Int pnnx_12220 1 1 12147 12148 prim::Constant pnnx_12221 0 1 23857 value=8 prim::ListConstruct pnnx_12222 6 1 12136 12146 12111 12148 23857 12144 12149 prim::Constant pnnx_12224 0 1 23858 value=0 prim::Constant pnnx_12225 0 1 23859 value=1 prim::Constant pnnx_12226 0 1 23860 value=3 prim::Constant pnnx_12227 0 1 23861 value=2 prim::ListConstruct pnnx_12228 6 1 23858 23859 23860 23861 12112 12113 12151 Tensor.view Tensor.view_1646 2 1 x.123 12149 x5.63 $input=x.123 $shape=12149 #x.123=(1,48,48,192)f32 #x5.63=(1,6,8,6,8,192)f32 prim::Constant pnnx_12232 0 1 23863 value=8 prim::Constant pnnx_12233 0 1 23864 value=8 prim::ListConstruct pnnx_12234 4 1 12114 23863 23864 12143 12154 torch.permute torch.permute_2768 2 1 x5.63 12151 12152 $input=x5.63 $dims=12151 #x5.63=(1,6,8,6,8,192)f32 #12152=(1,6,6,8,8,192)f32 
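Between blocks.5 and the first block of layers_dfe.4, the trace above runs transpose -> view to (1,192,48,48) -> nn.Conv2d layers_dfe.3.conv (3x3, 192->192) -> flatten -> transpose -> aten::add with tensor 11105 (the residual group's input). That is the residual-group tail: a 3x3 convolution applied on the spatial token grid plus a long skip connection. A short sketch follows, assuming H = W = 48 and using hypothetical names (residual_group_tail, group_input).

import torch
import torch.nn as nn

def residual_group_tail(x, group_input, conv, H=48, W=48):
    # x, group_input: (B, H*W, C) token sequences; conv: nn.Conv2d(192, 192, 3, padding=1)
    B, L, C = x.shape                          # (1, 2304, 192) in the trace
    y = x.transpose(1, 2).view(B, C, H, W)     # torch.transpose_3101 / Tensor.view_1644
    y = conv(y)                                # layers_dfe.3.conv
    y = torch.flatten(y, 2).transpose(1, 2)    # torch.flatten_2194 / torch.transpose_3102
    return y + group_input                     # aten::add pnnx_12177 (long residual)

The result of this tail is tensor 12087, which immediately feeds layers_dfe.4.residual_group.blocks.0.norm1 in the trace that follows.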
Tensor.contiguous Tensor.contiguous_182 1 1 12152 12153 memory_format=torch.contiguous_format $input=12152 #12152=(1,6,6,8,8,192)f32 #12153=(1,6,6,8,8,192)f32 prim::Constant pnnx_12236 0 1 23865 value=-1 prim::ListConstruct pnnx_12237 3 1 23865 12115 12129 12156 prim::Constant pnnx_12239 0 1 12158 value=1.767767e-01 prim::Constant pnnx_12240 0 1 12159 value=trunc prim::Constant pnnx_12241 0 1 12160 value=6 prim::Constant pnnx_12242 0 1 12161 value=0 prim::Constant pnnx_12243 0 1 12162 value=1 prim::Constant pnnx_12244 0 1 12163 value=2 prim::Constant pnnx_12245 0 1 12164 value=3 prim::Constant pnnx_12246 0 1 12165 value=6 prim::Constant pnnx_12247 0 1 12166 value=4 prim::Constant pnnx_12248 0 1 12167 value=-2 prim::Constant pnnx_12249 0 1 12168 value=-1 prim::Constant pnnx_12250 0 1 12169 value=64 pnnx.Attribute layers_dfe.4.residual_group.blocks.0.attn 0 1 relative_position_bias_table.123 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.123=(225,6)f32 pnnx.Attribute layers_dfe.4.residual_group.blocks.0.attn 0 1 relative_position_index.123 @relative_position_index=(64,64)i64 #relative_position_index.123=(64,64)i64 Tensor.view Tensor.view_1647 2 1 12153 12154 x_windows.123 $input=12153 $shape=12154 #12153=(1,6,6,8,8,192)f32 #x_windows.123=(36,8,8,192)f32 Tensor.view Tensor.view_1648 2 1 x_windows.123 12156 x6.51 $input=x_windows.123 $shape=12156 #x_windows.123=(36,8,8,192)f32 #x6.51=(36,64,192)f32 aten::size pnnx_12251 2 1 x6.51 12161 12177 #x6.51=(36,64,192)f32 prim::NumToTensor pnnx_12252 1 1 12177 B_.123 aten::Int pnnx_12253 1 1 B_.123 12179 aten::Int pnnx_12254 1 1 B_.123 12180 aten::size pnnx_12255 2 1 x6.51 12162 12181 #x6.51=(36,64,192)f32 prim::NumToTensor pnnx_12256 1 1 12181 N.123 aten::Int pnnx_12257 1 1 N.123 12183 aten::Int pnnx_12258 1 1 N.123 12184 aten::size pnnx_12259 2 1 x6.51 12163 12185 #x6.51=(36,64,192)f32 prim::NumToTensor pnnx_12260 1 1 12185 C.253 aten::Int pnnx_12261 1 1 C.253 12187 nn.Linear layers_dfe.4.residual_group.blocks.0.attn.qkv 1 1 x6.51 12188 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x6.51=(36,64,192)f32 #12188=(36,64,576)f32 aten::div pnnx_12262 3 1 C.253 12160 12159 12189 aten::Int pnnx_12263 1 1 12189 12190 prim::ListConstruct pnnx_12264 5 1 12180 12184 12164 12165 12190 12191 prim::Constant pnnx_12266 0 1 23866 value=2 prim::Constant pnnx_12267 0 1 23867 value=0 prim::Constant pnnx_12268 0 1 23868 value=3 prim::Constant pnnx_12269 0 1 23869 value=1 prim::ListConstruct pnnx_12270 5 1 23866 23867 23868 23869 12166 12193 Tensor.reshape Tensor.reshape_554 2 1 12188 12191 12192 $input=12188 $shape=12191 #12188=(36,64,576)f32 #12192=(36,64,3,6,32)f32 prim::Constant pnnx_12272 0 1 23870 value=0 prim::Constant pnnx_12273 0 1 23871 value=0 prim::Constant pnnx_12275 0 1 23872 value=0 prim::Constant pnnx_12276 0 1 23873 value=1 prim::Constant pnnx_12278 0 1 23874 value=0 prim::Constant pnnx_12279 0 1 23875 value=2 torch.permute torch.permute_2769 2 1 12192 12193 qkv1.51 $input=12192 $dims=12193 #12192=(36,64,3,6,32)f32 #qkv1.51=(3,36,6,64,32)f32 Tensor.select Tensor.select_830 3 1 qkv1.51 23870 23871 q.123 $input=qkv1.51 $dim=23870 $index=23871 #qkv1.51=(3,36,6,64,32)f32 #q.123=(36,6,64,32)f32 aten::mul pnnx_12281 2 1 q.123 12158 q1.51 #q.123=(36,6,64,32)f32 #q1.51=(36,6,64,32)f32 Tensor.select Tensor.select_831 3 1 qkv1.51 23872 23873 k.123 $input=qkv1.51 $dim=23872 $index=23873 #qkv1.51=(3,36,6,64,32)f32 #k.123=(36,6,64,32)f32 prim::Constant pnnx_12284 0 1 23876 value=-1 prim::ListConstruct 
pnnx_12285 1 1 23876 12201 Tensor.view Tensor.view_1649 2 1 relative_position_index.123 12201 12202 $input=relative_position_index.123 $shape=12201 #relative_position_index.123=(64,64)i64 #12202=(4096)i64 prim::ListConstruct pnnx_12287 1 1 12202 12203 #12202=(4096)i64 prim::Constant pnnx_12289 0 1 23877 value=64 prim::Constant pnnx_12290 0 1 23878 value=-1 prim::ListConstruct pnnx_12291 3 1 12169 23877 23878 12205 Tensor.index Tensor.index_386 2 1 relative_position_bias_table.123 12203 12204 $input=relative_position_bias_table.123 $expr=12203 #relative_position_bias_table.123=(225,6)f32 #12204=(4096,6)f32 prim::Constant pnnx_12293 0 1 23879 value=2 prim::Constant pnnx_12294 0 1 23880 value=0 prim::Constant pnnx_12295 0 1 23881 value=1 prim::ListConstruct pnnx_12296 3 1 23879 23880 23881 12207 Tensor.view Tensor.view_1650 2 1 12204 12205 relative_position_bias.123 $input=12204 $shape=12205 #12204=(4096,6)f32 #relative_position_bias.123=(64,64,6)f32 prim::Constant pnnx_12300 0 1 23883 value=0 torch.permute torch.permute_2770 2 1 relative_position_bias.123 12207 12208 $input=relative_position_bias.123 $dims=12207 #relative_position_bias.123=(64,64,6)f32 #12208=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_183 1 1 12208 relative_position_bias1.51 memory_format=torch.contiguous_format $input=12208 #12208=(6,64,64)f32 #relative_position_bias1.51=(6,64,64)f32 prim::Constant pnnx_12302 0 1 23884 value=1 torch.transpose torch.transpose_3103 3 1 k.123 12167 12168 12199 $input=k.123 $dim0=12167 $dim1=12168 #k.123=(36,6,64,32)f32 #12199=(36,6,32,64)f32 torch.matmul torch.matmul_2324 2 1 q1.51 12199 attn.247 $input=q1.51 $other=12199 #q1.51=(36,6,64,32)f32 #12199=(36,6,32,64)f32 #attn.247=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3338 2 1 relative_position_bias1.51 23883 12210 $input=relative_position_bias1.51 $dim=23883 #relative_position_bias1.51=(6,64,64)f32 #12210=(1,6,64,64)f32 aten::add pnnx_12303 3 1 attn.247 12210 23884 input.275 #attn.247=(36,6,64,64)f32 #12210=(1,6,64,64)f32 #input.275=(36,6,64,64)f32 nn.Softmax layers_dfe.4.residual_group.blocks.0.attn.softmax 1 1 input.275 12212 dim=-1 #input.275=(36,6,64,64)f32 #12212=(36,6,64,64)f32 nn.Dropout layers_dfe.4.residual_group.blocks.0.attn.attn_drop 1 1 12212 12213 #12212=(36,6,64,64)f32 #12213=(36,6,64,64)f32 Tensor.select Tensor.select_832 3 1 qkv1.51 23874 23875 v.123 $input=qkv1.51 $dim=23874 $index=23875 #qkv1.51=(3,36,6,64,32)f32 #v.123=(36,6,64,32)f32 prim::Constant pnnx_12305 0 1 23885 value=1 prim::Constant pnnx_12306 0 1 23886 value=2 torch.matmul torch.matmul_2325 2 1 12213 v.123 12214 $input=12213 $other=v.123 #12213=(36,6,64,64)f32 #v.123=(36,6,64,32)f32 #12214=(36,6,64,32)f32 prim::ListConstruct pnnx_12308 3 1 12179 12183 12187 12216 torch.transpose torch.transpose_3104 3 1 12214 23885 23886 12215 $input=12214 $dim0=23885 $dim1=23886 #12214=(36,6,64,32)f32 #12215=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_555 2 1 12215 12216 input1.53 $input=12215 $shape=12216 #12215=(36,64,6,32)f32 #input1.53=(36,64,192)f32 nn.Linear layers_dfe.4.residual_group.blocks.0.attn.proj 1 1 input1.53 12218 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.53=(36,64,192)f32 #12218=(36,64,192)f32 nn.Dropout layers_dfe.4.residual_group.blocks.0.attn.proj_drop 1 1 12218 12219 #12218=(36,64,192)f32 #12219=(36,64,192)f32 prim::Constant pnnx_12310 0 1 23887 value=-1 prim::Constant pnnx_12311 0 1 23888 value=8 prim::Constant pnnx_12312 0 1 23889 value=8 prim::ListConstruct pnnx_12313 4 1 23887 23888 23889 12128 
12220 prim::Constant pnnx_12315 0 1 23890 value=8 prim::Constant pnnx_12316 0 1 23891 value=trunc aten::div pnnx_12317 3 1 H0.1 23890 23891 12222 aten::Int pnnx_12318 1 1 12222 12223 prim::Constant pnnx_12319 0 1 23892 value=8 prim::Constant pnnx_12320 0 1 23893 value=trunc aten::div pnnx_12321 3 1 W0.1 23892 23893 12224 aten::Int pnnx_12322 1 1 12224 12225 prim::Constant pnnx_12323 0 1 23894 value=1 prim::Constant pnnx_12324 0 1 23895 value=8 prim::Constant pnnx_12325 0 1 23896 value=8 prim::Constant pnnx_12326 0 1 23897 value=-1 prim::ListConstruct pnnx_12327 6 1 23894 12223 12225 23895 23896 23897 12226 prim::Constant pnnx_12329 0 1 23898 value=0 prim::Constant pnnx_12330 0 1 23899 value=1 prim::Constant pnnx_12331 0 1 23900 value=3 prim::Constant pnnx_12332 0 1 23901 value=2 prim::Constant pnnx_12333 0 1 23902 value=4 prim::Constant pnnx_12334 0 1 23903 value=5 prim::ListConstruct pnnx_12335 6 1 23898 23899 23900 23901 23902 23903 12228 Tensor.view Tensor.view_1651 2 1 12219 12220 windows.123 $input=12219 $shape=12220 #12219=(36,64,192)f32 #windows.123=(36,8,8,192)f32 Tensor.view Tensor.view_1652 2 1 windows.123 12226 x7.51 $input=windows.123 $shape=12226 #windows.123=(36,8,8,192)f32 #x7.51=(1,6,6,8,8,192)f32 prim::Constant pnnx_12339 0 1 23905 value=1 prim::Constant pnnx_12340 0 1 23906 value=-1 prim::ListConstruct pnnx_12341 4 1 23905 923 1163 23906 12231 torch.permute torch.permute_2771 2 1 x7.51 12228 12229 $input=x7.51 $dims=12228 #x7.51=(1,6,6,8,8,192)f32 #12229=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_184 1 1 12229 12230 memory_format=torch.contiguous_format $input=12229 #12229=(1,6,8,6,8,192)f32 #12230=(1,6,8,6,8,192)f32 aten::mul pnnx_12343 2 1 H0.1 W0.1 12233 aten::Int pnnx_12344 1 1 12233 12234 prim::ListConstruct pnnx_12345 3 1 12123 12234 12127 12235 prim::Constant pnnx_12347 0 1 12237 value=None prim::Constant pnnx_12348 0 1 23907 value=1 Tensor.view Tensor.view_1653 2 1 12230 12231 x8.51 $input=12230 $shape=12231 #12230=(1,6,8,6,8,192)f32 #x8.51=(1,48,48,192)f32 Tensor.view Tensor.view_1654 2 1 x8.51 12235 x9.51 $input=x8.51 $shape=12235 #x8.51=(1,48,48,192)f32 #x9.51=(1,2304,192)f32 aten::add pnnx_12349 3 1 12087 x9.51 23907 input.277 #12087=(1,2304,192)f32 #x9.51=(1,2304,192)f32 #input.277=(1,2304,192)f32 nn.LayerNorm layers_dfe.4.residual_group.blocks.0.norm2 1 1 input.277 12239 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.277=(1,2304,192)f32 #12239=(1,2304,192)f32 nn.Linear layers_dfe.4.residual_group.blocks.0.mlp.fc1 1 1 12239 12244 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #12239=(1,2304,192)f32 #12244=(1,2304,384)f32 nn.GELU layers_dfe.4.residual_group.blocks.0.mlp.act 1 1 12244 12245 #12244=(1,2304,384)f32 #12245=(1,2304,384)f32 nn.Dropout layers_dfe.4.residual_group.blocks.0.mlp.drop 1 1 12245 12246 #12245=(1,2304,384)f32 #12246=(1,2304,384)f32 nn.Linear layers_dfe.4.residual_group.blocks.0.mlp.fc2 1 1 12246 12247 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #12246=(1,2304,384)f32 #12247=(1,2304,192)f32 nn.Dropout layers_dfe.4.residual_group.blocks.0.mlp.drop 1 1 12247 12248 #12247=(1,2304,192)f32 #12248=(1,2304,192)f32 prim::Constant pnnx_12350 0 1 12249 value=None prim::Constant pnnx_12351 0 1 23908 value=1 aten::add pnnx_12352 3 1 input.277 12248 23908 12250 #input.277=(1,2304,192)f32 #12248=(1,2304,192)f32 #12250=(1,2304,192)f32 prim::Constant pnnx_12353 0 1 12251 value=trunc prim::Constant pnnx_12354 0 1 12252 
value=8 prim::Constant pnnx_12355 0 1 12253 value=0 prim::Constant pnnx_12356 0 1 12254 value=2 prim::Constant pnnx_12357 0 1 12255 value=-4 prim::Constant pnnx_12358 0 1 12256 value=1 prim::Constant pnnx_12359 0 1 12257 value=3 prim::Constant pnnx_12360 0 1 12258 value=8 prim::Constant pnnx_12361 0 1 12259 value=4 prim::Constant pnnx_12362 0 1 12260 value=5 prim::Constant pnnx_12363 0 1 12261 value=-1 prim::Constant pnnx_12364 0 1 12262 value=64 pnnx.Attribute layers_dfe.4.residual_group.blocks.1 0 1 attn_mask.63 @attn_mask=(36,64,64)f32 #attn_mask.63=(36,64,64)f32 aten::size pnnx_12365 2 1 12250 12253 12269 #12250=(1,2304,192)f32 prim::NumToTensor pnnx_12366 1 1 12269 B.149 aten::Int pnnx_12367 1 1 B.149 12271 aten::Int pnnx_12368 1 1 B.149 12272 aten::size pnnx_12369 2 1 12250 12254 12273 #12250=(1,2304,192)f32 prim::NumToTensor pnnx_12370 1 1 12273 C.255 aten::Int pnnx_12371 1 1 C.255 12275 aten::Int pnnx_12372 1 1 C.255 12276 aten::Int pnnx_12373 1 1 C.255 12277 aten::Int pnnx_12374 1 1 C.255 12278 nn.LayerNorm layers_dfe.4.residual_group.blocks.1.norm1 1 1 12250 12279 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #12250=(1,2304,192)f32 #12279=(1,2304,192)f32 prim::ListConstruct pnnx_12375 4 1 12272 920 1160 12278 12280 prim::Constant pnnx_12377 0 1 23909 value=-4 prim::ListConstruct pnnx_12378 2 1 12255 23909 12282 prim::Constant pnnx_12379 0 1 23910 value=2 prim::ListConstruct pnnx_12380 2 1 12256 23910 12283 Tensor.view Tensor.view_1655 2 1 12279 12280 x.125 $input=12279 $shape=12280 #12279=(1,2304,192)f32 #x.125=(1,48,48,192)f32 prim::Constant pnnx_12382 0 1 23911 value=0 torch.roll torch.roll_2480 3 1 x.125 12282 12283 x6.53 $input=x.125 $shifts=12282 $dims=12283 #x.125=(1,48,48,192)f32 #x6.53=(1,48,48,192)f32 aten::size pnnx_12383 2 1 x6.53 23911 12285 #x6.53=(1,48,48,192)f32 prim::NumToTensor pnnx_12384 1 1 12285 B1.53 aten::Int pnnx_12385 1 1 B1.53 12287 prim::Constant pnnx_12386 0 1 23912 value=1 aten::size pnnx_12387 2 1 x6.53 23912 12288 #x6.53=(1,48,48,192)f32 prim::NumToTensor pnnx_12388 1 1 12288 12289 prim::Constant pnnx_12389 0 1 23913 value=2 aten::size pnnx_12390 2 1 x6.53 23913 12290 #x6.53=(1,48,48,192)f32 prim::NumToTensor pnnx_12391 1 1 12290 12291 aten::size pnnx_12392 2 1 x6.53 12257 12292 #x6.53=(1,48,48,192)f32 prim::NumToTensor pnnx_12393 1 1 12292 C1.53 aten::Int pnnx_12394 1 1 C1.53 12294 aten::Int pnnx_12395 1 1 C1.53 12295 aten::div pnnx_12396 3 1 12289 12252 12251 12296 aten::Int pnnx_12397 1 1 12296 12297 prim::Constant pnnx_12398 0 1 23914 value=8 prim::Constant pnnx_12399 0 1 23915 value=trunc aten::div pnnx_12400 3 1 12291 23914 23915 12298 aten::Int pnnx_12401 1 1 12298 12299 prim::Constant pnnx_12402 0 1 23916 value=8 prim::ListConstruct pnnx_12403 6 1 12287 12297 12258 12299 23916 12295 12300 prim::Constant pnnx_12405 0 1 23917 value=0 prim::Constant pnnx_12406 0 1 23918 value=1 prim::Constant pnnx_12407 0 1 23919 value=3 prim::Constant pnnx_12408 0 1 23920 value=2 prim::ListConstruct pnnx_12409 6 1 23917 23918 23919 23920 12259 12260 12302 Tensor.view Tensor.view_1656 2 1 x6.53 12300 x7.53 $input=x6.53 $shape=12300 #x6.53=(1,48,48,192)f32 #x7.53=(1,6,8,6,8,192)f32 prim::Constant pnnx_12413 0 1 23922 value=8 prim::Constant pnnx_12414 0 1 23923 value=8 prim::ListConstruct pnnx_12415 4 1 12261 23922 23923 12294 12305 torch.permute torch.permute_2772 2 1 x7.53 12302 12303 $input=x7.53 $dims=12302 #x7.53=(1,6,8,6,8,192)f32 #12303=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_185 1 1 12303 
12304 memory_format=torch.contiguous_format $input=12303 #12303=(1,6,6,8,8,192)f32 #12304=(1,6,6,8,8,192)f32 prim::Constant pnnx_12417 0 1 23924 value=-1 prim::ListConstruct pnnx_12418 3 1 23924 12262 12277 12307 prim::Constant pnnx_12420 0 1 12309 value=1.767767e-01 prim::Constant pnnx_12421 0 1 12310 value=trunc prim::Constant pnnx_12422 0 1 12311 value=6 prim::Constant pnnx_12423 0 1 12312 value=0 prim::Constant pnnx_12424 0 1 12313 value=1 prim::Constant pnnx_12425 0 1 12314 value=2 prim::Constant pnnx_12426 0 1 12315 value=3 prim::Constant pnnx_12427 0 1 12316 value=6 prim::Constant pnnx_12428 0 1 12317 value=4 prim::Constant pnnx_12429 0 1 12318 value=-2 prim::Constant pnnx_12430 0 1 12319 value=-1 prim::Constant pnnx_12431 0 1 12320 value=64 pnnx.Attribute layers_dfe.4.residual_group.blocks.1.attn 0 1 relative_position_bias_table.125 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.125=(225,6)f32 pnnx.Attribute layers_dfe.4.residual_group.blocks.1.attn 0 1 relative_position_index.125 @relative_position_index=(64,64)i64 #relative_position_index.125=(64,64)i64 Tensor.view Tensor.view_1657 2 1 12304 12305 x_windows.125 $input=12304 $shape=12305 #12304=(1,6,6,8,8,192)f32 #x_windows.125=(36,8,8,192)f32 Tensor.view Tensor.view_1658 2 1 x_windows.125 12307 x8.53 $input=x_windows.125 $shape=12307 #x_windows.125=(36,8,8,192)f32 #x8.53=(36,64,192)f32 aten::size pnnx_12432 2 1 x8.53 12312 12328 #x8.53=(36,64,192)f32 prim::NumToTensor pnnx_12433 1 1 12328 B_.125 aten::Int pnnx_12434 1 1 B_.125 12330 aten::Int pnnx_12435 1 1 B_.125 12331 aten::size pnnx_12436 2 1 x8.53 12313 12332 #x8.53=(36,64,192)f32 prim::NumToTensor pnnx_12437 1 1 12332 N.125 aten::Int pnnx_12438 1 1 N.125 12334 aten::Int pnnx_12439 1 1 N.125 12335 aten::Int pnnx_12440 1 1 N.125 12336 aten::Int pnnx_12441 1 1 N.125 12337 aten::Int pnnx_12442 1 1 N.125 12338 aten::Int pnnx_12443 1 1 N.125 12339 aten::size pnnx_12444 2 1 x8.53 12314 12340 #x8.53=(36,64,192)f32 prim::NumToTensor pnnx_12445 1 1 12340 C.257 aten::Int pnnx_12446 1 1 C.257 12342 nn.Linear layers_dfe.4.residual_group.blocks.1.attn.qkv 1 1 x8.53 12343 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x8.53=(36,64,192)f32 #12343=(36,64,576)f32 aten::div pnnx_12447 3 1 C.257 12311 12310 12344 aten::Int pnnx_12448 1 1 12344 12345 prim::ListConstruct pnnx_12449 5 1 12331 12339 12315 12316 12345 12346 prim::Constant pnnx_12451 0 1 23925 value=2 prim::Constant pnnx_12452 0 1 23926 value=0 prim::Constant pnnx_12453 0 1 23927 value=3 prim::Constant pnnx_12454 0 1 23928 value=1 prim::ListConstruct pnnx_12455 5 1 23925 23926 23927 23928 12317 12348 Tensor.reshape Tensor.reshape_556 2 1 12343 12346 12347 $input=12343 $shape=12346 #12343=(36,64,576)f32 #12347=(36,64,3,6,32)f32 prim::Constant pnnx_12457 0 1 23929 value=0 prim::Constant pnnx_12458 0 1 23930 value=0 prim::Constant pnnx_12460 0 1 23931 value=0 prim::Constant pnnx_12461 0 1 23932 value=1 prim::Constant pnnx_12463 0 1 23933 value=0 prim::Constant pnnx_12464 0 1 23934 value=2 torch.permute torch.permute_2773 2 1 12347 12348 qkv1.53 $input=12347 $dims=12348 #12347=(36,64,3,6,32)f32 #qkv1.53=(3,36,6,64,32)f32 Tensor.select Tensor.select_833 3 1 qkv1.53 23929 23930 q.125 $input=qkv1.53 $dim=23929 $index=23930 #qkv1.53=(3,36,6,64,32)f32 #q.125=(36,6,64,32)f32 aten::mul pnnx_12466 2 1 q.125 12309 q1.53 #q.125=(36,6,64,32)f32 #q1.53=(36,6,64,32)f32 Tensor.select Tensor.select_834 3 1 qkv1.53 23931 23932 k.125 $input=qkv1.53 $dim=23931 $index=23932 #qkv1.53=(3,36,6,64,32)f32 
#k.125=(36,6,64,32)f32 prim::Constant pnnx_12469 0 1 23935 value=-1 prim::ListConstruct pnnx_12470 1 1 23935 12356 Tensor.view Tensor.view_1659 2 1 relative_position_index.125 12356 12357 $input=relative_position_index.125 $shape=12356 #relative_position_index.125=(64,64)i64 #12357=(4096)i64 prim::ListConstruct pnnx_12472 1 1 12357 12358 #12357=(4096)i64 prim::Constant pnnx_12474 0 1 23936 value=64 prim::Constant pnnx_12475 0 1 23937 value=-1 prim::ListConstruct pnnx_12476 3 1 12320 23936 23937 12360 Tensor.index Tensor.index_387 2 1 relative_position_bias_table.125 12358 12359 $input=relative_position_bias_table.125 $expr=12358 #relative_position_bias_table.125=(225,6)f32 #12359=(4096,6)f32 prim::Constant pnnx_12478 0 1 23938 value=2 prim::Constant pnnx_12479 0 1 23939 value=0 prim::Constant pnnx_12480 0 1 23940 value=1 prim::ListConstruct pnnx_12481 3 1 23938 23939 23940 12362 Tensor.view Tensor.view_1660 2 1 12359 12360 relative_position_bias.125 $input=12359 $shape=12360 #12359=(4096,6)f32 #relative_position_bias.125=(64,64,6)f32 prim::Constant pnnx_12485 0 1 23942 value=0 torch.permute torch.permute_2774 2 1 relative_position_bias.125 12362 12363 $input=relative_position_bias.125 $dims=12362 #relative_position_bias.125=(64,64,6)f32 #12363=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_186 1 1 12363 relative_position_bias1.53 memory_format=torch.contiguous_format $input=12363 #12363=(6,64,64)f32 #relative_position_bias1.53=(6,64,64)f32 prim::Constant pnnx_12487 0 1 23943 value=1 torch.transpose torch.transpose_3105 3 1 k.125 12318 12319 12354 $input=k.125 $dim0=12318 $dim1=12319 #k.125=(36,6,64,32)f32 #12354=(36,6,32,64)f32 torch.matmul torch.matmul_2326 2 1 q1.53 12354 attn.251 $input=q1.53 $other=12354 #q1.53=(36,6,64,32)f32 #12354=(36,6,32,64)f32 #attn.251=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3339 2 1 relative_position_bias1.53 23942 12365 $input=relative_position_bias1.53 $dim=23942 #relative_position_bias1.53=(6,64,64)f32 #12365=(1,6,64,64)f32 aten::add pnnx_12488 3 1 attn.251 12365 23943 attn2.27 #attn.251=(36,6,64,64)f32 #12365=(1,6,64,64)f32 #attn2.27=(36,6,64,64)f32 prim::Constant pnnx_12489 0 1 23944 value=0 aten::size pnnx_12490 2 1 attn_mask.63 23944 12367 #attn_mask.63=(36,64,64)f32 prim::NumToTensor pnnx_12491 1 1 12367 other.63 aten::Int pnnx_12492 1 1 other.63 12369 prim::Constant pnnx_12493 0 1 23945 value=trunc aten::div pnnx_12494 3 1 B_.125 other.63 23945 12370 aten::Int pnnx_12495 1 1 12370 12371 prim::Constant pnnx_12496 0 1 23946 value=6 prim::ListConstruct pnnx_12497 5 1 12371 12369 23946 12338 12337 12372 prim::Constant pnnx_12499 0 1 23947 value=1 prim::Constant pnnx_12501 0 1 23948 value=0 prim::Constant pnnx_12503 0 1 23949 value=1 Tensor.view Tensor.view_1661 2 1 attn2.27 12372 12373 $input=attn2.27 $shape=12372 #attn2.27=(36,6,64,64)f32 #12373=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3340 2 1 attn_mask.63 23947 12374 $input=attn_mask.63 $dim=23947 #attn_mask.63=(36,64,64)f32 #12374=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3341 2 1 12374 23948 12375 $input=12374 $dim=23948 #12374=(36,1,64,64)f32 #12375=(1,36,1,64,64)f32 aten::add pnnx_12504 3 1 12373 12375 23949 attn3.27 #12373=(1,36,6,64,64)f32 #12375=(1,36,1,64,64)f32 #attn3.27=(1,36,6,64,64)f32 prim::Constant pnnx_12505 0 1 23950 value=-1 prim::Constant pnnx_12506 0 1 23951 value=6 prim::ListConstruct pnnx_12507 4 1 23950 23951 12336 12335 12377 Tensor.view Tensor.view_1662 2 1 attn3.27 12377 input.279 $input=attn3.27 $shape=12377 #attn3.27=(1,36,6,64,64)f32 
#input.279=(36,6,64,64)f32 nn.Softmax layers_dfe.4.residual_group.blocks.1.attn.softmax 1 1 input.279 12379 dim=-1 #input.279=(36,6,64,64)f32 #12379=(36,6,64,64)f32 nn.Dropout layers_dfe.4.residual_group.blocks.1.attn.attn_drop 1 1 12379 12380 #12379=(36,6,64,64)f32 #12380=(36,6,64,64)f32 Tensor.select Tensor.select_835 3 1 qkv1.53 23933 23934 v.125 $input=qkv1.53 $dim=23933 $index=23934 #qkv1.53=(3,36,6,64,32)f32 #v.125=(36,6,64,32)f32 prim::Constant pnnx_12510 0 1 23952 value=1 prim::Constant pnnx_12511 0 1 23953 value=2 torch.matmul torch.matmul_2327 2 1 12380 v.125 12381 $input=12380 $other=v.125 #12380=(36,6,64,64)f32 #v.125=(36,6,64,32)f32 #12381=(36,6,64,32)f32 prim::ListConstruct pnnx_12513 3 1 12330 12334 12342 12383 torch.transpose torch.transpose_3106 3 1 12381 23952 23953 12382 $input=12381 $dim0=23952 $dim1=23953 #12381=(36,6,64,32)f32 #12382=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_557 2 1 12382 12383 input1.55 $input=12382 $shape=12383 #12382=(36,64,6,32)f32 #input1.55=(36,64,192)f32 nn.Linear layers_dfe.4.residual_group.blocks.1.attn.proj 1 1 input1.55 12385 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.55=(36,64,192)f32 #12385=(36,64,192)f32 nn.Dropout layers_dfe.4.residual_group.blocks.1.attn.proj_drop 1 1 12385 12386 #12385=(36,64,192)f32 #12386=(36,64,192)f32 prim::Constant pnnx_12515 0 1 23954 value=-1 prim::Constant pnnx_12516 0 1 23955 value=8 prim::Constant pnnx_12517 0 1 23956 value=8 prim::ListConstruct pnnx_12518 4 1 23954 23955 23956 12276 12387 prim::Constant pnnx_12520 0 1 23957 value=8 prim::Constant pnnx_12521 0 1 23958 value=trunc aten::div pnnx_12522 3 1 H0.1 23957 23958 12389 aten::Int pnnx_12523 1 1 12389 12390 prim::Constant pnnx_12524 0 1 23959 value=8 prim::Constant pnnx_12525 0 1 23960 value=trunc aten::div pnnx_12526 3 1 W0.1 23959 23960 12391 aten::Int pnnx_12527 1 1 12391 12392 prim::Constant pnnx_12528 0 1 23961 value=1 prim::Constant pnnx_12529 0 1 23962 value=8 prim::Constant pnnx_12530 0 1 23963 value=8 prim::Constant pnnx_12531 0 1 23964 value=-1 prim::ListConstruct pnnx_12532 6 1 23961 12390 12392 23962 23963 23964 12393 prim::Constant pnnx_12534 0 1 23965 value=0 prim::Constant pnnx_12535 0 1 23966 value=1 prim::Constant pnnx_12536 0 1 23967 value=3 prim::Constant pnnx_12537 0 1 23968 value=2 prim::Constant pnnx_12538 0 1 23969 value=4 prim::Constant pnnx_12539 0 1 23970 value=5 prim::ListConstruct pnnx_12540 6 1 23965 23966 23967 23968 23969 23970 12395 Tensor.view Tensor.view_1663 2 1 12386 12387 windows.125 $input=12386 $shape=12387 #12386=(36,64,192)f32 #windows.125=(36,8,8,192)f32 Tensor.view Tensor.view_1664 2 1 windows.125 12393 x9.53 $input=windows.125 $shape=12393 #windows.125=(36,8,8,192)f32 #x9.53=(1,6,6,8,8,192)f32 prim::Constant pnnx_12544 0 1 23972 value=1 prim::Constant pnnx_12545 0 1 23973 value=-1 prim::ListConstruct pnnx_12546 4 1 23972 917 1157 23973 12398 torch.permute torch.permute_2775 2 1 x9.53 12395 12396 $input=x9.53 $dims=12395 #x9.53=(1,6,6,8,8,192)f32 #12396=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_187 1 1 12396 12397 memory_format=torch.contiguous_format $input=12396 #12396=(1,6,8,6,8,192)f32 #12397=(1,6,8,6,8,192)f32 prim::Constant pnnx_12548 0 1 23974 value=4 prim::Constant pnnx_12549 0 1 23975 value=4 prim::ListConstruct pnnx_12550 2 1 23974 23975 12400 prim::Constant pnnx_12551 0 1 23976 value=1 prim::Constant pnnx_12552 0 1 23977 value=2 prim::ListConstruct pnnx_12553 2 1 23976 23977 12401 Tensor.view Tensor.view_1665 2 1 12397 12398 shifted_x.63 
$input=12397 $shape=12398 #12397=(1,6,8,6,8,192)f32 #shifted_x.63=(1,48,48,192)f32 aten::mul pnnx_12555 2 1 H0.1 W0.1 12403 aten::Int pnnx_12556 1 1 12403 12404 prim::ListConstruct pnnx_12557 3 1 12271 12404 12275 12405 prim::Constant pnnx_12559 0 1 12407 value=None prim::Constant pnnx_12560 0 1 23978 value=1 torch.roll torch.roll_2481 3 1 shifted_x.63 12400 12401 x10.27 $input=shifted_x.63 $shifts=12400 $dims=12401 #shifted_x.63=(1,48,48,192)f32 #x10.27=(1,48,48,192)f32 Tensor.view Tensor.view_1666 2 1 x10.27 12405 x11.27 $input=x10.27 $shape=12405 #x10.27=(1,48,48,192)f32 #x11.27=(1,2304,192)f32 aten::add pnnx_12561 3 1 12250 x11.27 23978 input.281 #12250=(1,2304,192)f32 #x11.27=(1,2304,192)f32 #input.281=(1,2304,192)f32 nn.LayerNorm layers_dfe.4.residual_group.blocks.1.norm2 1 1 input.281 12409 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.281=(1,2304,192)f32 #12409=(1,2304,192)f32 nn.Linear layers_dfe.4.residual_group.blocks.1.mlp.fc1 1 1 12409 12414 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #12409=(1,2304,192)f32 #12414=(1,2304,384)f32 nn.GELU layers_dfe.4.residual_group.blocks.1.mlp.act 1 1 12414 12415 #12414=(1,2304,384)f32 #12415=(1,2304,384)f32 nn.Dropout layers_dfe.4.residual_group.blocks.1.mlp.drop 1 1 12415 12416 #12415=(1,2304,384)f32 #12416=(1,2304,384)f32 nn.Linear layers_dfe.4.residual_group.blocks.1.mlp.fc2 1 1 12416 12417 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #12416=(1,2304,384)f32 #12417=(1,2304,192)f32 nn.Dropout layers_dfe.4.residual_group.blocks.1.mlp.drop 1 1 12417 12418 #12417=(1,2304,192)f32 #12418=(1,2304,192)f32 prim::Constant pnnx_12562 0 1 12419 value=None prim::Constant pnnx_12563 0 1 23979 value=1 aten::add pnnx_12564 3 1 input.281 12418 23979 12420 #input.281=(1,2304,192)f32 #12418=(1,2304,192)f32 #12420=(1,2304,192)f32 prim::Constant pnnx_12565 0 1 12421 value=trunc prim::Constant pnnx_12566 0 1 12422 value=8 prim::Constant pnnx_12567 0 1 12423 value=0 prim::Constant pnnx_12568 0 1 12424 value=2 prim::Constant pnnx_12569 0 1 12425 value=1 prim::Constant pnnx_12570 0 1 12426 value=3 prim::Constant pnnx_12571 0 1 12427 value=8 prim::Constant pnnx_12572 0 1 12428 value=4 prim::Constant pnnx_12573 0 1 12429 value=5 prim::Constant pnnx_12574 0 1 12430 value=-1 prim::Constant pnnx_12575 0 1 12431 value=64 aten::size pnnx_12576 2 1 12420 12423 12437 #12420=(1,2304,192)f32 prim::NumToTensor pnnx_12577 1 1 12437 B.151 aten::Int pnnx_12578 1 1 B.151 12439 aten::Int pnnx_12579 1 1 B.151 12440 aten::size pnnx_12580 2 1 12420 12424 12441 #12420=(1,2304,192)f32 prim::NumToTensor pnnx_12581 1 1 12441 C.259 aten::Int pnnx_12582 1 1 C.259 12443 aten::Int pnnx_12583 1 1 C.259 12444 aten::Int pnnx_12584 1 1 C.259 12445 aten::Int pnnx_12585 1 1 C.259 12446 nn.LayerNorm layers_dfe.4.residual_group.blocks.2.norm1 1 1 12420 12447 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #12420=(1,2304,192)f32 #12447=(1,2304,192)f32 prim::ListConstruct pnnx_12586 4 1 12440 914 1154 12446 12448 prim::Constant pnnx_12588 0 1 23980 value=0 Tensor.view Tensor.view_1667 2 1 12447 12448 x.127 $input=12447 $shape=12448 #12447=(1,2304,192)f32 #x.127=(1,48,48,192)f32 aten::size pnnx_12589 2 1 x.127 23980 12450 #x.127=(1,48,48,192)f32 prim::NumToTensor pnnx_12590 1 1 12450 B1.55 aten::Int pnnx_12591 1 1 B1.55 12452 aten::size pnnx_12592 2 1 x.127 12425 12453 #x.127=(1,48,48,192)f32 prim::NumToTensor pnnx_12593 1 1 12453 
12454 prim::Constant pnnx_12594 0 1 23981 value=2 aten::size pnnx_12595 2 1 x.127 23981 12455 #x.127=(1,48,48,192)f32 prim::NumToTensor pnnx_12596 1 1 12455 12456 aten::size pnnx_12597 2 1 x.127 12426 12457 #x.127=(1,48,48,192)f32 prim::NumToTensor pnnx_12598 1 1 12457 C1.55 aten::Int pnnx_12599 1 1 C1.55 12459 aten::Int pnnx_12600 1 1 C1.55 12460 aten::div pnnx_12601 3 1 12454 12422 12421 12461 aten::Int pnnx_12602 1 1 12461 12462 prim::Constant pnnx_12603 0 1 23982 value=8 prim::Constant pnnx_12604 0 1 23983 value=trunc aten::div pnnx_12605 3 1 12456 23982 23983 12463 aten::Int pnnx_12606 1 1 12463 12464 prim::Constant pnnx_12607 0 1 23984 value=8 prim::ListConstruct pnnx_12608 6 1 12452 12462 12427 12464 23984 12460 12465 prim::Constant pnnx_12610 0 1 23985 value=0 prim::Constant pnnx_12611 0 1 23986 value=1 prim::Constant pnnx_12612 0 1 23987 value=3 prim::Constant pnnx_12613 0 1 23988 value=2 prim::ListConstruct pnnx_12614 6 1 23985 23986 23987 23988 12428 12429 12467 Tensor.view Tensor.view_1668 2 1 x.127 12465 x5.65 $input=x.127 $shape=12465 #x.127=(1,48,48,192)f32 #x5.65=(1,6,8,6,8,192)f32 prim::Constant pnnx_12618 0 1 23990 value=8 prim::Constant pnnx_12619 0 1 23991 value=8 prim::ListConstruct pnnx_12620 4 1 12430 23990 23991 12459 12470 torch.permute torch.permute_2776 2 1 x5.65 12467 12468 $input=x5.65 $dims=12467 #x5.65=(1,6,8,6,8,192)f32 #12468=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_188 1 1 12468 12469 memory_format=torch.contiguous_format $input=12468 #12468=(1,6,6,8,8,192)f32 #12469=(1,6,6,8,8,192)f32 prim::Constant pnnx_12622 0 1 23992 value=-1 prim::ListConstruct pnnx_12623 3 1 23992 12431 12445 12472 prim::Constant pnnx_12625 0 1 12474 value=1.767767e-01 prim::Constant pnnx_12626 0 1 12475 value=trunc prim::Constant pnnx_12627 0 1 12476 value=6 prim::Constant pnnx_12628 0 1 12477 value=0 prim::Constant pnnx_12629 0 1 12478 value=1 prim::Constant pnnx_12630 0 1 12479 value=2 prim::Constant pnnx_12631 0 1 12480 value=3 prim::Constant pnnx_12632 0 1 12481 value=6 prim::Constant pnnx_12633 0 1 12482 value=4 prim::Constant pnnx_12634 0 1 12483 value=-2 prim::Constant pnnx_12635 0 1 12484 value=-1 prim::Constant pnnx_12636 0 1 12485 value=64 pnnx.Attribute layers_dfe.4.residual_group.blocks.2.attn 0 1 relative_position_bias_table.127 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.127=(225,6)f32 pnnx.Attribute layers_dfe.4.residual_group.blocks.2.attn 0 1 relative_position_index.127 @relative_position_index=(64,64)i64 #relative_position_index.127=(64,64)i64 Tensor.view Tensor.view_1669 2 1 12469 12470 x_windows.127 $input=12469 $shape=12470 #12469=(1,6,6,8,8,192)f32 #x_windows.127=(36,8,8,192)f32 Tensor.view Tensor.view_1670 2 1 x_windows.127 12472 x6.55 $input=x_windows.127 $shape=12472 #x_windows.127=(36,8,8,192)f32 #x6.55=(36,64,192)f32 aten::size pnnx_12637 2 1 x6.55 12477 12493 #x6.55=(36,64,192)f32 prim::NumToTensor pnnx_12638 1 1 12493 B_.127 aten::Int pnnx_12639 1 1 B_.127 12495 aten::Int pnnx_12640 1 1 B_.127 12496 aten::size pnnx_12641 2 1 x6.55 12478 12497 #x6.55=(36,64,192)f32 prim::NumToTensor pnnx_12642 1 1 12497 N.127 aten::Int pnnx_12643 1 1 N.127 12499 aten::Int pnnx_12644 1 1 N.127 12500 aten::size pnnx_12645 2 1 x6.55 12479 12501 #x6.55=(36,64,192)f32 prim::NumToTensor pnnx_12646 1 1 12501 C.261 aten::Int pnnx_12647 1 1 C.261 12503 nn.Linear layers_dfe.4.residual_group.blocks.2.attn.qkv 1 1 x6.55 12504 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x6.55=(36,64,192)f32 
#12504=(36,64,576)f32 aten::div pnnx_12648 3 1 C.261 12476 12475 12505 aten::Int pnnx_12649 1 1 12505 12506 prim::ListConstruct pnnx_12650 5 1 12496 12500 12480 12481 12506 12507 prim::Constant pnnx_12652 0 1 23993 value=2 prim::Constant pnnx_12653 0 1 23994 value=0 prim::Constant pnnx_12654 0 1 23995 value=3 prim::Constant pnnx_12655 0 1 23996 value=1 prim::ListConstruct pnnx_12656 5 1 23993 23994 23995 23996 12482 12509 Tensor.reshape Tensor.reshape_558 2 1 12504 12507 12508 $input=12504 $shape=12507 #12504=(36,64,576)f32 #12508=(36,64,3,6,32)f32 prim::Constant pnnx_12658 0 1 23997 value=0 prim::Constant pnnx_12659 0 1 23998 value=0 prim::Constant pnnx_12661 0 1 23999 value=0 prim::Constant pnnx_12662 0 1 24000 value=1 prim::Constant pnnx_12664 0 1 24001 value=0 prim::Constant pnnx_12665 0 1 24002 value=2 torch.permute torch.permute_2777 2 1 12508 12509 qkv1.55 $input=12508 $dims=12509 #12508=(36,64,3,6,32)f32 #qkv1.55=(3,36,6,64,32)f32 Tensor.select Tensor.select_836 3 1 qkv1.55 23997 23998 q.127 $input=qkv1.55 $dim=23997 $index=23998 #qkv1.55=(3,36,6,64,32)f32 #q.127=(36,6,64,32)f32 aten::mul pnnx_12667 2 1 q.127 12474 q1.55 #q.127=(36,6,64,32)f32 #q1.55=(36,6,64,32)f32 Tensor.select Tensor.select_837 3 1 qkv1.55 23999 24000 k.127 $input=qkv1.55 $dim=23999 $index=24000 #qkv1.55=(3,36,6,64,32)f32 #k.127=(36,6,64,32)f32 prim::Constant pnnx_12670 0 1 24003 value=-1 prim::ListConstruct pnnx_12671 1 1 24003 12517 Tensor.view Tensor.view_1671 2 1 relative_position_index.127 12517 12518 $input=relative_position_index.127 $shape=12517 #relative_position_index.127=(64,64)i64 #12518=(4096)i64 prim::ListConstruct pnnx_12673 1 1 12518 12519 #12518=(4096)i64 prim::Constant pnnx_12675 0 1 24004 value=64 prim::Constant pnnx_12676 0 1 24005 value=-1 prim::ListConstruct pnnx_12677 3 1 12485 24004 24005 12521 Tensor.index Tensor.index_388 2 1 relative_position_bias_table.127 12519 12520 $input=relative_position_bias_table.127 $expr=12519 #relative_position_bias_table.127=(225,6)f32 #12520=(4096,6)f32 prim::Constant pnnx_12679 0 1 24006 value=2 prim::Constant pnnx_12680 0 1 24007 value=0 prim::Constant pnnx_12681 0 1 24008 value=1 prim::ListConstruct pnnx_12682 3 1 24006 24007 24008 12523 Tensor.view Tensor.view_1672 2 1 12520 12521 relative_position_bias.127 $input=12520 $shape=12521 #12520=(4096,6)f32 #relative_position_bias.127=(64,64,6)f32 prim::Constant pnnx_12686 0 1 24010 value=0 torch.permute torch.permute_2778 2 1 relative_position_bias.127 12523 12524 $input=relative_position_bias.127 $dims=12523 #relative_position_bias.127=(64,64,6)f32 #12524=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_189 1 1 12524 relative_position_bias1.55 memory_format=torch.contiguous_format $input=12524 #12524=(6,64,64)f32 #relative_position_bias1.55=(6,64,64)f32 prim::Constant pnnx_12688 0 1 24011 value=1 torch.transpose torch.transpose_3107 3 1 k.127 12483 12484 12515 $input=k.127 $dim0=12483 $dim1=12484 #k.127=(36,6,64,32)f32 #12515=(36,6,32,64)f32 torch.matmul torch.matmul_2328 2 1 q1.55 12515 attn.255 $input=q1.55 $other=12515 #q1.55=(36,6,64,32)f32 #12515=(36,6,32,64)f32 #attn.255=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3342 2 1 relative_position_bias1.55 24010 12526 $input=relative_position_bias1.55 $dim=24010 #relative_position_bias1.55=(6,64,64)f32 #12526=(1,6,64,64)f32 aten::add pnnx_12689 3 1 attn.255 12526 24011 input.283 #attn.255=(36,6,64,64)f32 #12526=(1,6,64,64)f32 #input.283=(36,6,64,64)f32 nn.Softmax layers_dfe.4.residual_group.blocks.2.attn.softmax 1 1 input.283 12528 dim=-1 
#input.283=(36,6,64,64)f32 #12528=(36,6,64,64)f32 nn.Dropout layers_dfe.4.residual_group.blocks.2.attn.attn_drop 1 1 12528 12529 #12528=(36,6,64,64)f32 #12529=(36,6,64,64)f32 Tensor.select Tensor.select_838 3 1 qkv1.55 24001 24002 v.127 $input=qkv1.55 $dim=24001 $index=24002 #qkv1.55=(3,36,6,64,32)f32 #v.127=(36,6,64,32)f32 prim::Constant pnnx_12691 0 1 24012 value=1 prim::Constant pnnx_12692 0 1 24013 value=2 torch.matmul torch.matmul_2329 2 1 12529 v.127 12530 $input=12529 $other=v.127 #12529=(36,6,64,64)f32 #v.127=(36,6,64,32)f32 #12530=(36,6,64,32)f32 prim::ListConstruct pnnx_12694 3 1 12495 12499 12503 12532 torch.transpose torch.transpose_3108 3 1 12530 24012 24013 12531 $input=12530 $dim0=24012 $dim1=24013 #12530=(36,6,64,32)f32 #12531=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_559 2 1 12531 12532 input1.57 $input=12531 $shape=12532 #12531=(36,64,6,32)f32 #input1.57=(36,64,192)f32 nn.Linear layers_dfe.4.residual_group.blocks.2.attn.proj 1 1 input1.57 12534 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.57=(36,64,192)f32 #12534=(36,64,192)f32 nn.Dropout layers_dfe.4.residual_group.blocks.2.attn.proj_drop 1 1 12534 12535 #12534=(36,64,192)f32 #12535=(36,64,192)f32 prim::Constant pnnx_12696 0 1 24014 value=-1 prim::Constant pnnx_12697 0 1 24015 value=8 prim::Constant pnnx_12698 0 1 24016 value=8 prim::ListConstruct pnnx_12699 4 1 24014 24015 24016 12444 12536 prim::Constant pnnx_12701 0 1 24017 value=8 prim::Constant pnnx_12702 0 1 24018 value=trunc aten::div pnnx_12703 3 1 H0.1 24017 24018 12538 aten::Int pnnx_12704 1 1 12538 12539 prim::Constant pnnx_12705 0 1 24019 value=8 prim::Constant pnnx_12706 0 1 24020 value=trunc aten::div pnnx_12707 3 1 W0.1 24019 24020 12540 aten::Int pnnx_12708 1 1 12540 12541 prim::Constant pnnx_12709 0 1 24021 value=1 prim::Constant pnnx_12710 0 1 24022 value=8 prim::Constant pnnx_12711 0 1 24023 value=8 prim::Constant pnnx_12712 0 1 24024 value=-1 prim::ListConstruct pnnx_12713 6 1 24021 12539 12541 24022 24023 24024 12542 prim::Constant pnnx_12715 0 1 24025 value=0 prim::Constant pnnx_12716 0 1 24026 value=1 prim::Constant pnnx_12717 0 1 24027 value=3 prim::Constant pnnx_12718 0 1 24028 value=2 prim::Constant pnnx_12719 0 1 24029 value=4 prim::Constant pnnx_12720 0 1 24030 value=5 prim::ListConstruct pnnx_12721 6 1 24025 24026 24027 24028 24029 24030 12544 Tensor.view Tensor.view_1673 2 1 12535 12536 windows.127 $input=12535 $shape=12536 #12535=(36,64,192)f32 #windows.127=(36,8,8,192)f32 Tensor.view Tensor.view_1674 2 1 windows.127 12542 x7.55 $input=windows.127 $shape=12542 #windows.127=(36,8,8,192)f32 #x7.55=(1,6,6,8,8,192)f32 prim::Constant pnnx_12725 0 1 24032 value=1 prim::Constant pnnx_12726 0 1 24033 value=-1 prim::ListConstruct pnnx_12727 4 1 24032 911 1151 24033 12547 torch.permute torch.permute_2779 2 1 x7.55 12544 12545 $input=x7.55 $dims=12544 #x7.55=(1,6,6,8,8,192)f32 #12545=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_190 1 1 12545 12546 memory_format=torch.contiguous_format $input=12545 #12545=(1,6,8,6,8,192)f32 #12546=(1,6,8,6,8,192)f32 aten::mul pnnx_12729 2 1 H0.1 W0.1 12549 aten::Int pnnx_12730 1 1 12549 12550 prim::ListConstruct pnnx_12731 3 1 12439 12550 12443 12551 prim::Constant pnnx_12733 0 1 12553 value=None prim::Constant pnnx_12734 0 1 24034 value=1 Tensor.view Tensor.view_1675 2 1 12546 12547 x8.55 $input=12546 $shape=12547 #12546=(1,6,8,6,8,192)f32 #x8.55=(1,48,48,192)f32 Tensor.view Tensor.view_1676 2 1 x8.55 12551 x9.55 $input=x8.55 $shape=12551 #x8.55=(1,48,48,192)f32 
#x9.55=(1,2304,192)f32 aten::add pnnx_12735 3 1 12420 x9.55 24034 input.285 #12420=(1,2304,192)f32 #x9.55=(1,2304,192)f32 #input.285=(1,2304,192)f32 nn.LayerNorm layers_dfe.4.residual_group.blocks.2.norm2 1 1 input.285 12555 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.285=(1,2304,192)f32 #12555=(1,2304,192)f32 nn.Linear layers_dfe.4.residual_group.blocks.2.mlp.fc1 1 1 12555 12560 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #12555=(1,2304,192)f32 #12560=(1,2304,384)f32 nn.GELU layers_dfe.4.residual_group.blocks.2.mlp.act 1 1 12560 12561 #12560=(1,2304,384)f32 #12561=(1,2304,384)f32 nn.Dropout layers_dfe.4.residual_group.blocks.2.mlp.drop 1 1 12561 12562 #12561=(1,2304,384)f32 #12562=(1,2304,384)f32 nn.Linear layers_dfe.4.residual_group.blocks.2.mlp.fc2 1 1 12562 12563 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #12562=(1,2304,384)f32 #12563=(1,2304,192)f32 nn.Dropout layers_dfe.4.residual_group.blocks.2.mlp.drop 1 1 12563 12564 #12563=(1,2304,192)f32 #12564=(1,2304,192)f32 prim::Constant pnnx_12736 0 1 12565 value=None prim::Constant pnnx_12737 0 1 24035 value=1 aten::add pnnx_12738 3 1 input.285 12564 24035 12566 #input.285=(1,2304,192)f32 #12564=(1,2304,192)f32 #12566=(1,2304,192)f32 prim::Constant pnnx_12739 0 1 12567 value=trunc prim::Constant pnnx_12740 0 1 12568 value=8 prim::Constant pnnx_12741 0 1 12569 value=0 prim::Constant pnnx_12742 0 1 12570 value=2 prim::Constant pnnx_12743 0 1 12571 value=-4 prim::Constant pnnx_12744 0 1 12572 value=1 prim::Constant pnnx_12745 0 1 12573 value=3 prim::Constant pnnx_12746 0 1 12574 value=8 prim::Constant pnnx_12747 0 1 12575 value=4 prim::Constant pnnx_12748 0 1 12576 value=5 prim::Constant pnnx_12749 0 1 12577 value=-1 prim::Constant pnnx_12750 0 1 12578 value=64 pnnx.Attribute layers_dfe.4.residual_group.blocks.3 0 1 attn_mask.65 @attn_mask=(36,64,64)f32 #attn_mask.65=(36,64,64)f32 aten::size pnnx_12751 2 1 12566 12569 12585 #12566=(1,2304,192)f32 prim::NumToTensor pnnx_12752 1 1 12585 B.153 aten::Int pnnx_12753 1 1 B.153 12587 aten::Int pnnx_12754 1 1 B.153 12588 aten::size pnnx_12755 2 1 12566 12570 12589 #12566=(1,2304,192)f32 prim::NumToTensor pnnx_12756 1 1 12589 C.263 aten::Int pnnx_12757 1 1 C.263 12591 aten::Int pnnx_12758 1 1 C.263 12592 aten::Int pnnx_12759 1 1 C.263 12593 aten::Int pnnx_12760 1 1 C.263 12594 nn.LayerNorm layers_dfe.4.residual_group.blocks.3.norm1 1 1 12566 12595 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #12566=(1,2304,192)f32 #12595=(1,2304,192)f32 prim::ListConstruct pnnx_12761 4 1 12588 908 1148 12594 12596 prim::Constant pnnx_12763 0 1 24036 value=-4 prim::ListConstruct pnnx_12764 2 1 12571 24036 12598 prim::Constant pnnx_12765 0 1 24037 value=2 prim::ListConstruct pnnx_12766 2 1 12572 24037 12599 Tensor.view Tensor.view_1677 2 1 12595 12596 x.129 $input=12595 $shape=12596 #12595=(1,2304,192)f32 #x.129=(1,48,48,192)f32 prim::Constant pnnx_12768 0 1 24038 value=0 torch.roll torch.roll_2482 3 1 x.129 12598 12599 x6.57 $input=x.129 $shifts=12598 $dims=12599 #x.129=(1,48,48,192)f32 #x6.57=(1,48,48,192)f32 aten::size pnnx_12769 2 1 x6.57 24038 12601 #x6.57=(1,48,48,192)f32 prim::NumToTensor pnnx_12770 1 1 12601 B1.57 aten::Int pnnx_12771 1 1 B1.57 12603 prim::Constant pnnx_12772 0 1 24039 value=1 aten::size pnnx_12773 2 1 x6.57 24039 12604 #x6.57=(1,48,48,192)f32 prim::NumToTensor pnnx_12774 1 1 12604 12605 prim::Constant pnnx_12775 0 1 
24040 value=2 aten::size pnnx_12776 2 1 x6.57 24040 12606 #x6.57=(1,48,48,192)f32 prim::NumToTensor pnnx_12777 1 1 12606 12607 aten::size pnnx_12778 2 1 x6.57 12573 12608 #x6.57=(1,48,48,192)f32 prim::NumToTensor pnnx_12779 1 1 12608 C1.57 aten::Int pnnx_12780 1 1 C1.57 12610 aten::Int pnnx_12781 1 1 C1.57 12611 aten::div pnnx_12782 3 1 12605 12568 12567 12612 aten::Int pnnx_12783 1 1 12612 12613 prim::Constant pnnx_12784 0 1 24041 value=8 prim::Constant pnnx_12785 0 1 24042 value=trunc aten::div pnnx_12786 3 1 12607 24041 24042 12614 aten::Int pnnx_12787 1 1 12614 12615 prim::Constant pnnx_12788 0 1 24043 value=8 prim::ListConstruct pnnx_12789 6 1 12603 12613 12574 12615 24043 12611 12616 prim::Constant pnnx_12791 0 1 24044 value=0 prim::Constant pnnx_12792 0 1 24045 value=1 prim::Constant pnnx_12793 0 1 24046 value=3 prim::Constant pnnx_12794 0 1 24047 value=2 prim::ListConstruct pnnx_12795 6 1 24044 24045 24046 24047 12575 12576 12618 Tensor.view Tensor.view_1678 2 1 x6.57 12616 x7.57 $input=x6.57 $shape=12616 #x6.57=(1,48,48,192)f32 #x7.57=(1,6,8,6,8,192)f32 prim::Constant pnnx_12799 0 1 24049 value=8 prim::Constant pnnx_12800 0 1 24050 value=8 prim::ListConstruct pnnx_12801 4 1 12577 24049 24050 12610 12621 torch.permute torch.permute_2780 2 1 x7.57 12618 12619 $input=x7.57 $dims=12618 #x7.57=(1,6,8,6,8,192)f32 #12619=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_191 1 1 12619 12620 memory_format=torch.contiguous_format $input=12619 #12619=(1,6,6,8,8,192)f32 #12620=(1,6,6,8,8,192)f32 prim::Constant pnnx_12803 0 1 24051 value=-1 prim::ListConstruct pnnx_12804 3 1 24051 12578 12593 12623 prim::Constant pnnx_12806 0 1 12625 value=1.767767e-01 prim::Constant pnnx_12807 0 1 12626 value=trunc prim::Constant pnnx_12808 0 1 12627 value=6 prim::Constant pnnx_12809 0 1 12628 value=0 prim::Constant pnnx_12810 0 1 12629 value=1 prim::Constant pnnx_12811 0 1 12630 value=2 prim::Constant pnnx_12812 0 1 12631 value=3 prim::Constant pnnx_12813 0 1 12632 value=6 prim::Constant pnnx_12814 0 1 12633 value=4 prim::Constant pnnx_12815 0 1 12634 value=-2 prim::Constant pnnx_12816 0 1 12635 value=-1 prim::Constant pnnx_12817 0 1 12636 value=64 pnnx.Attribute layers_dfe.4.residual_group.blocks.3.attn 0 1 relative_position_bias_table.129 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.129=(225,6)f32 pnnx.Attribute layers_dfe.4.residual_group.blocks.3.attn 0 1 relative_position_index.129 @relative_position_index=(64,64)i64 #relative_position_index.129=(64,64)i64 Tensor.view Tensor.view_1679 2 1 12620 12621 x_windows.129 $input=12620 $shape=12621 #12620=(1,6,6,8,8,192)f32 #x_windows.129=(36,8,8,192)f32 Tensor.view Tensor.view_1680 2 1 x_windows.129 12623 x8.57 $input=x_windows.129 $shape=12623 #x_windows.129=(36,8,8,192)f32 #x8.57=(36,64,192)f32 aten::size pnnx_12818 2 1 x8.57 12628 12644 #x8.57=(36,64,192)f32 prim::NumToTensor pnnx_12819 1 1 12644 B_.129 aten::Int pnnx_12820 1 1 B_.129 12646 aten::Int pnnx_12821 1 1 B_.129 12647 aten::size pnnx_12822 2 1 x8.57 12629 12648 #x8.57=(36,64,192)f32 prim::NumToTensor pnnx_12823 1 1 12648 N.129 aten::Int pnnx_12824 1 1 N.129 12650 aten::Int pnnx_12825 1 1 N.129 12651 aten::Int pnnx_12826 1 1 N.129 12652 aten::Int pnnx_12827 1 1 N.129 12653 aten::Int pnnx_12828 1 1 N.129 12654 aten::Int pnnx_12829 1 1 N.129 12655 aten::size pnnx_12830 2 1 x8.57 12630 12656 #x8.57=(36,64,192)f32 prim::NumToTensor pnnx_12831 1 1 12656 C.265 aten::Int pnnx_12832 1 1 C.265 12658 nn.Linear layers_dfe.4.residual_group.blocks.3.attn.qkv 1 1 x8.57 12659 
bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x8.57=(36,64,192)f32 #12659=(36,64,576)f32 aten::div pnnx_12833 3 1 C.265 12627 12626 12660 aten::Int pnnx_12834 1 1 12660 12661 prim::ListConstruct pnnx_12835 5 1 12647 12655 12631 12632 12661 12662 prim::Constant pnnx_12837 0 1 24052 value=2 prim::Constant pnnx_12838 0 1 24053 value=0 prim::Constant pnnx_12839 0 1 24054 value=3 prim::Constant pnnx_12840 0 1 24055 value=1 prim::ListConstruct pnnx_12841 5 1 24052 24053 24054 24055 12633 12664 Tensor.reshape Tensor.reshape_560 2 1 12659 12662 12663 $input=12659 $shape=12662 #12659=(36,64,576)f32 #12663=(36,64,3,6,32)f32 prim::Constant pnnx_12843 0 1 24056 value=0 prim::Constant pnnx_12844 0 1 24057 value=0 prim::Constant pnnx_12846 0 1 24058 value=0 prim::Constant pnnx_12847 0 1 24059 value=1 prim::Constant pnnx_12849 0 1 24060 value=0 prim::Constant pnnx_12850 0 1 24061 value=2 torch.permute torch.permute_2781 2 1 12663 12664 qkv1.57 $input=12663 $dims=12664 #12663=(36,64,3,6,32)f32 #qkv1.57=(3,36,6,64,32)f32 Tensor.select Tensor.select_839 3 1 qkv1.57 24056 24057 q.129 $input=qkv1.57 $dim=24056 $index=24057 #qkv1.57=(3,36,6,64,32)f32 #q.129=(36,6,64,32)f32 aten::mul pnnx_12852 2 1 q.129 12625 q1.57 #q.129=(36,6,64,32)f32 #q1.57=(36,6,64,32)f32 Tensor.select Tensor.select_840 3 1 qkv1.57 24058 24059 k.129 $input=qkv1.57 $dim=24058 $index=24059 #qkv1.57=(3,36,6,64,32)f32 #k.129=(36,6,64,32)f32 prim::Constant pnnx_12855 0 1 24062 value=-1 prim::ListConstruct pnnx_12856 1 1 24062 12672 Tensor.view Tensor.view_1681 2 1 relative_position_index.129 12672 12673 $input=relative_position_index.129 $shape=12672 #relative_position_index.129=(64,64)i64 #12673=(4096)i64 prim::ListConstruct pnnx_12858 1 1 12673 12674 #12673=(4096)i64 prim::Constant pnnx_12860 0 1 24063 value=64 prim::Constant pnnx_12861 0 1 24064 value=-1 prim::ListConstruct pnnx_12862 3 1 12636 24063 24064 12676 Tensor.index Tensor.index_389 2 1 relative_position_bias_table.129 12674 12675 $input=relative_position_bias_table.129 $expr=12674 #relative_position_bias_table.129=(225,6)f32 #12675=(4096,6)f32 prim::Constant pnnx_12864 0 1 24065 value=2 prim::Constant pnnx_12865 0 1 24066 value=0 prim::Constant pnnx_12866 0 1 24067 value=1 prim::ListConstruct pnnx_12867 3 1 24065 24066 24067 12678 Tensor.view Tensor.view_1682 2 1 12675 12676 relative_position_bias.129 $input=12675 $shape=12676 #12675=(4096,6)f32 #relative_position_bias.129=(64,64,6)f32 prim::Constant pnnx_12871 0 1 24069 value=0 torch.permute torch.permute_2782 2 1 relative_position_bias.129 12678 12679 $input=relative_position_bias.129 $dims=12678 #relative_position_bias.129=(64,64,6)f32 #12679=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_192 1 1 12679 relative_position_bias1.57 memory_format=torch.contiguous_format $input=12679 #12679=(6,64,64)f32 #relative_position_bias1.57=(6,64,64)f32 prim::Constant pnnx_12873 0 1 24070 value=1 torch.transpose torch.transpose_3109 3 1 k.129 12634 12635 12670 $input=k.129 $dim0=12634 $dim1=12635 #k.129=(36,6,64,32)f32 #12670=(36,6,32,64)f32 torch.matmul torch.matmul_2330 2 1 q1.57 12670 attn.259 $input=q1.57 $other=12670 #q1.57=(36,6,64,32)f32 #12670=(36,6,32,64)f32 #attn.259=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3343 2 1 relative_position_bias1.57 24069 12681 $input=relative_position_bias1.57 $dim=24069 #relative_position_bias1.57=(6,64,64)f32 #12681=(1,6,64,64)f32 aten::add pnnx_12874 3 1 attn.259 12681 24070 attn2.29 #attn.259=(36,6,64,64)f32 #12681=(1,6,64,64)f32 #attn2.29=(36,6,64,64)f32 
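The qkv / reshape / permute / matmul run above for layers_dfe.4.residual_group.blocks.3.attn is scaled window self-attention with a learned relative position bias: the (225,6) bias table is gathered with the flattened (64,64) index, reshaped to per-head (6,64,64) form, and added to q·k^T. A minimal sketch under the shapes annotated in the graph (8x8 window -> 64 tokens, 6 heads, head_dim 32, scale 1/sqrt(32) = 0.1767767); variable names and the random tensors are illustrative placeholders, not values from the dump:

import torch

num_heads, head_dim, N = 6, 32, 64                    # N = 8*8 tokens per window
scale = head_dim ** -0.5                              # 0.1767767 in the dump
qkv = torch.randn(36, N, 3 * num_heads * head_dim)    # output of attn.qkv
qkv = qkv.reshape(36, N, 3, num_heads, head_dim).permute(2, 0, 3, 1, 4)
q, k, v = qkv[0], qkv[1], qkv[2]                      # each (36, 6, 64, 32)

attn = (q * scale) @ k.transpose(-2, -1)              # (36, 6, 64, 64)

table = torch.randn(225, num_heads)                   # relative_position_bias_table
index = torch.randint(0, 225, (N, N))                 # relative_position_index
bias = table[index.view(-1)].view(N, N, num_heads).permute(2, 0, 1).contiguous()
attn = attn + bias.unsqueeze(0)                       # broadcast over the 36 windows

In the shifted blocks (blocks.1, blocks.3, ...) the attn_mask attribute is then added after a reshape, exactly as the Tensor.view / torch.unsqueeze / aten::add sequence that follows here.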
prim::Constant pnnx_12875 0 1 24071 value=0 aten::size pnnx_12876 2 1 attn_mask.65 24071 12683 #attn_mask.65=(36,64,64)f32 prim::NumToTensor pnnx_12877 1 1 12683 other.65 aten::Int pnnx_12878 1 1 other.65 12685 prim::Constant pnnx_12879 0 1 24072 value=trunc aten::div pnnx_12880 3 1 B_.129 other.65 24072 12686 aten::Int pnnx_12881 1 1 12686 12687 prim::Constant pnnx_12882 0 1 24073 value=6 prim::ListConstruct pnnx_12883 5 1 12687 12685 24073 12654 12653 12688 prim::Constant pnnx_12885 0 1 24074 value=1 prim::Constant pnnx_12887 0 1 24075 value=0 prim::Constant pnnx_12889 0 1 24076 value=1 Tensor.view Tensor.view_1683 2 1 attn2.29 12688 12689 $input=attn2.29 $shape=12688 #attn2.29=(36,6,64,64)f32 #12689=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3344 2 1 attn_mask.65 24074 12690 $input=attn_mask.65 $dim=24074 #attn_mask.65=(36,64,64)f32 #12690=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3345 2 1 12690 24075 12691 $input=12690 $dim=24075 #12690=(36,1,64,64)f32 #12691=(1,36,1,64,64)f32 aten::add pnnx_12890 3 1 12689 12691 24076 attn3.29 #12689=(1,36,6,64,64)f32 #12691=(1,36,1,64,64)f32 #attn3.29=(1,36,6,64,64)f32 prim::Constant pnnx_12891 0 1 24077 value=-1 prim::Constant pnnx_12892 0 1 24078 value=6 prim::ListConstruct pnnx_12893 4 1 24077 24078 12652 12651 12693 Tensor.view Tensor.view_1684 2 1 attn3.29 12693 input.287 $input=attn3.29 $shape=12693 #attn3.29=(1,36,6,64,64)f32 #input.287=(36,6,64,64)f32 nn.Softmax layers_dfe.4.residual_group.blocks.3.attn.softmax 1 1 input.287 12695 dim=-1 #input.287=(36,6,64,64)f32 #12695=(36,6,64,64)f32 nn.Dropout layers_dfe.4.residual_group.blocks.3.attn.attn_drop 1 1 12695 12696 #12695=(36,6,64,64)f32 #12696=(36,6,64,64)f32 Tensor.select Tensor.select_841 3 1 qkv1.57 24060 24061 v.129 $input=qkv1.57 $dim=24060 $index=24061 #qkv1.57=(3,36,6,64,32)f32 #v.129=(36,6,64,32)f32 prim::Constant pnnx_12896 0 1 24079 value=1 prim::Constant pnnx_12897 0 1 24080 value=2 torch.matmul torch.matmul_2331 2 1 12696 v.129 12697 $input=12696 $other=v.129 #12696=(36,6,64,64)f32 #v.129=(36,6,64,32)f32 #12697=(36,6,64,32)f32 prim::ListConstruct pnnx_12899 3 1 12646 12650 12658 12699 torch.transpose torch.transpose_3110 3 1 12697 24079 24080 12698 $input=12697 $dim0=24079 $dim1=24080 #12697=(36,6,64,32)f32 #12698=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_561 2 1 12698 12699 input1.59 $input=12698 $shape=12699 #12698=(36,64,6,32)f32 #input1.59=(36,64,192)f32 nn.Linear layers_dfe.4.residual_group.blocks.3.attn.proj 1 1 input1.59 12701 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.59=(36,64,192)f32 #12701=(36,64,192)f32 nn.Dropout layers_dfe.4.residual_group.blocks.3.attn.proj_drop 1 1 12701 12702 #12701=(36,64,192)f32 #12702=(36,64,192)f32 prim::Constant pnnx_12901 0 1 24081 value=-1 prim::Constant pnnx_12902 0 1 24082 value=8 prim::Constant pnnx_12903 0 1 24083 value=8 prim::ListConstruct pnnx_12904 4 1 24081 24082 24083 12592 12703 prim::Constant pnnx_12906 0 1 24084 value=8 prim::Constant pnnx_12907 0 1 24085 value=trunc aten::div pnnx_12908 3 1 H0.1 24084 24085 12705 aten::Int pnnx_12909 1 1 12705 12706 prim::Constant pnnx_12910 0 1 24086 value=8 prim::Constant pnnx_12911 0 1 24087 value=trunc aten::div pnnx_12912 3 1 W0.1 24086 24087 12707 aten::Int pnnx_12913 1 1 12707 12708 prim::Constant pnnx_12914 0 1 24088 value=1 prim::Constant pnnx_12915 0 1 24089 value=8 prim::Constant pnnx_12916 0 1 24090 value=8 prim::Constant pnnx_12917 0 1 24091 value=-1 prim::ListConstruct pnnx_12918 6 1 24088 12706 12708 24089 24090 24091 
12709 prim::Constant pnnx_12920 0 1 24092 value=0 prim::Constant pnnx_12921 0 1 24093 value=1 prim::Constant pnnx_12922 0 1 24094 value=3 prim::Constant pnnx_12923 0 1 24095 value=2 prim::Constant pnnx_12924 0 1 24096 value=4 prim::Constant pnnx_12925 0 1 24097 value=5 prim::ListConstruct pnnx_12926 6 1 24092 24093 24094 24095 24096 24097 12711 Tensor.view Tensor.view_1685 2 1 12702 12703 windows.129 $input=12702 $shape=12703 #12702=(36,64,192)f32 #windows.129=(36,8,8,192)f32 Tensor.view Tensor.view_1686 2 1 windows.129 12709 x9.57 $input=windows.129 $shape=12709 #windows.129=(36,8,8,192)f32 #x9.57=(1,6,6,8,8,192)f32 prim::Constant pnnx_12930 0 1 24099 value=1 prim::Constant pnnx_12931 0 1 24100 value=-1 prim::ListConstruct pnnx_12932 4 1 24099 905 1145 24100 12714 torch.permute torch.permute_2783 2 1 x9.57 12711 12712 $input=x9.57 $dims=12711 #x9.57=(1,6,6,8,8,192)f32 #12712=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_193 1 1 12712 12713 memory_format=torch.contiguous_format $input=12712 #12712=(1,6,8,6,8,192)f32 #12713=(1,6,8,6,8,192)f32 prim::Constant pnnx_12934 0 1 24101 value=4 prim::Constant pnnx_12935 0 1 24102 value=4 prim::ListConstruct pnnx_12936 2 1 24101 24102 12716 prim::Constant pnnx_12937 0 1 24103 value=1 prim::Constant pnnx_12938 0 1 24104 value=2 prim::ListConstruct pnnx_12939 2 1 24103 24104 12717 Tensor.view Tensor.view_1687 2 1 12713 12714 shifted_x.65 $input=12713 $shape=12714 #12713=(1,6,8,6,8,192)f32 #shifted_x.65=(1,48,48,192)f32 aten::mul pnnx_12941 2 1 H0.1 W0.1 12719 aten::Int pnnx_12942 1 1 12719 12720 prim::ListConstruct pnnx_12943 3 1 12587 12720 12591 12721 prim::Constant pnnx_12945 0 1 12723 value=None prim::Constant pnnx_12946 0 1 24105 value=1 torch.roll torch.roll_2483 3 1 shifted_x.65 12716 12717 x10.29 $input=shifted_x.65 $shifts=12716 $dims=12717 #shifted_x.65=(1,48,48,192)f32 #x10.29=(1,48,48,192)f32 Tensor.view Tensor.view_1688 2 1 x10.29 12721 x11.29 $input=x10.29 $shape=12721 #x10.29=(1,48,48,192)f32 #x11.29=(1,2304,192)f32 aten::add pnnx_12947 3 1 12566 x11.29 24105 input.289 #12566=(1,2304,192)f32 #x11.29=(1,2304,192)f32 #input.289=(1,2304,192)f32 nn.LayerNorm layers_dfe.4.residual_group.blocks.3.norm2 1 1 input.289 12725 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.289=(1,2304,192)f32 #12725=(1,2304,192)f32 nn.Linear layers_dfe.4.residual_group.blocks.3.mlp.fc1 1 1 12725 12730 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #12725=(1,2304,192)f32 #12730=(1,2304,384)f32 nn.GELU layers_dfe.4.residual_group.blocks.3.mlp.act 1 1 12730 12731 #12730=(1,2304,384)f32 #12731=(1,2304,384)f32 nn.Dropout layers_dfe.4.residual_group.blocks.3.mlp.drop 1 1 12731 12732 #12731=(1,2304,384)f32 #12732=(1,2304,384)f32 nn.Linear layers_dfe.4.residual_group.blocks.3.mlp.fc2 1 1 12732 12733 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #12732=(1,2304,384)f32 #12733=(1,2304,192)f32 nn.Dropout layers_dfe.4.residual_group.blocks.3.mlp.drop 1 1 12733 12734 #12733=(1,2304,192)f32 #12734=(1,2304,192)f32 prim::Constant pnnx_12948 0 1 12735 value=None prim::Constant pnnx_12949 0 1 24106 value=1 aten::add pnnx_12950 3 1 input.289 12734 24106 12736 #input.289=(1,2304,192)f32 #12734=(1,2304,192)f32 #12736=(1,2304,192)f32 prim::Constant pnnx_12951 0 1 12737 value=trunc prim::Constant pnnx_12952 0 1 12738 value=8 prim::Constant pnnx_12953 0 1 12739 value=0 prim::Constant pnnx_12954 0 1 12740 value=2 prim::Constant pnnx_12955 0 1 12741 value=1 
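The operators above close the shifted-window block blocks.3: windows are merged back to (1,48,48,192), the (-4,-4) cyclic shift applied at the block entry is undone with torch.roll shifts (4,4), and the residual + norm2 + MLP (Linear 192->384, GELU, Linear 384->192) path is applied. A minimal sketch of that tail, assuming the module sizes annotated in the graph (dropout probability is not recorded in the dump, so 0.0 is only a placeholder; names are illustrative):

import torch
import torch.nn as nn

norm2 = nn.LayerNorm(192)
mlp = nn.Sequential(nn.Linear(192, 384), nn.GELU(), nn.Dropout(0.0),
                    nn.Linear(384, 192), nn.Dropout(0.0))

shortcut = torch.randn(1, 2304, 192)         # block input (12566 in this graph)
shifted_x = torch.randn(1, 48, 48, 192)      # merged windows after attention
x = torch.roll(shifted_x, shifts=(4, 4), dims=(1, 2))  # undo the (-4, -4) shift
x = shortcut + x.view(1, 48 * 48, 192)       # first residual connection
x = x + mlp(norm2(x))                        # FFN residual (norm2 + mlp)

The unshifted block blocks.4 that starts below repeats the same pattern without the roll and without an attn_mask attribute.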
prim::Constant pnnx_12956 0 1 12742 value=3 prim::Constant pnnx_12957 0 1 12743 value=8 prim::Constant pnnx_12958 0 1 12744 value=4 prim::Constant pnnx_12959 0 1 12745 value=5 prim::Constant pnnx_12960 0 1 12746 value=-1 prim::Constant pnnx_12961 0 1 12747 value=64 aten::size pnnx_12962 2 1 12736 12739 12753 #12736=(1,2304,192)f32 prim::NumToTensor pnnx_12963 1 1 12753 B.155 aten::Int pnnx_12964 1 1 B.155 12755 aten::Int pnnx_12965 1 1 B.155 12756 aten::size pnnx_12966 2 1 12736 12740 12757 #12736=(1,2304,192)f32 prim::NumToTensor pnnx_12967 1 1 12757 C.267 aten::Int pnnx_12968 1 1 C.267 12759 aten::Int pnnx_12969 1 1 C.267 12760 aten::Int pnnx_12970 1 1 C.267 12761 aten::Int pnnx_12971 1 1 C.267 12762 nn.LayerNorm layers_dfe.4.residual_group.blocks.4.norm1 1 1 12736 12763 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #12736=(1,2304,192)f32 #12763=(1,2304,192)f32 prim::ListConstruct pnnx_12972 4 1 12756 902 1142 12762 12764 prim::Constant pnnx_12974 0 1 24107 value=0 Tensor.view Tensor.view_1689 2 1 12763 12764 x.131 $input=12763 $shape=12764 #12763=(1,2304,192)f32 #x.131=(1,48,48,192)f32 aten::size pnnx_12975 2 1 x.131 24107 12766 #x.131=(1,48,48,192)f32 prim::NumToTensor pnnx_12976 1 1 12766 B1.59 aten::Int pnnx_12977 1 1 B1.59 12768 aten::size pnnx_12978 2 1 x.131 12741 12769 #x.131=(1,48,48,192)f32 prim::NumToTensor pnnx_12979 1 1 12769 12770 prim::Constant pnnx_12980 0 1 24108 value=2 aten::size pnnx_12981 2 1 x.131 24108 12771 #x.131=(1,48,48,192)f32 prim::NumToTensor pnnx_12982 1 1 12771 12772 aten::size pnnx_12983 2 1 x.131 12742 12773 #x.131=(1,48,48,192)f32 prim::NumToTensor pnnx_12984 1 1 12773 C1.59 aten::Int pnnx_12985 1 1 C1.59 12775 aten::Int pnnx_12986 1 1 C1.59 12776 aten::div pnnx_12987 3 1 12770 12738 12737 12777 aten::Int pnnx_12988 1 1 12777 12778 prim::Constant pnnx_12989 0 1 24109 value=8 prim::Constant pnnx_12990 0 1 24110 value=trunc aten::div pnnx_12991 3 1 12772 24109 24110 12779 aten::Int pnnx_12992 1 1 12779 12780 prim::Constant pnnx_12993 0 1 24111 value=8 prim::ListConstruct pnnx_12994 6 1 12768 12778 12743 12780 24111 12776 12781 prim::Constant pnnx_12996 0 1 24112 value=0 prim::Constant pnnx_12997 0 1 24113 value=1 prim::Constant pnnx_12998 0 1 24114 value=3 prim::Constant pnnx_12999 0 1 24115 value=2 prim::ListConstruct pnnx_13000 6 1 24112 24113 24114 24115 12744 12745 12783 Tensor.view Tensor.view_1690 2 1 x.131 12781 x5.67 $input=x.131 $shape=12781 #x.131=(1,48,48,192)f32 #x5.67=(1,6,8,6,8,192)f32 prim::Constant pnnx_13004 0 1 24117 value=8 prim::Constant pnnx_13005 0 1 24118 value=8 prim::ListConstruct pnnx_13006 4 1 12746 24117 24118 12775 12786 torch.permute torch.permute_2784 2 1 x5.67 12783 12784 $input=x5.67 $dims=12783 #x5.67=(1,6,8,6,8,192)f32 #12784=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_194 1 1 12784 12785 memory_format=torch.contiguous_format $input=12784 #12784=(1,6,6,8,8,192)f32 #12785=(1,6,6,8,8,192)f32 prim::Constant pnnx_13008 0 1 24119 value=-1 prim::ListConstruct pnnx_13009 3 1 24119 12747 12761 12788 prim::Constant pnnx_13011 0 1 12790 value=1.767767e-01 prim::Constant pnnx_13012 0 1 12791 value=trunc prim::Constant pnnx_13013 0 1 12792 value=6 prim::Constant pnnx_13014 0 1 12793 value=0 prim::Constant pnnx_13015 0 1 12794 value=1 prim::Constant pnnx_13016 0 1 12795 value=2 prim::Constant pnnx_13017 0 1 12796 value=3 prim::Constant pnnx_13018 0 1 12797 value=6 prim::Constant pnnx_13019 0 1 12798 value=4 prim::Constant pnnx_13020 0 1 12799 value=-2 prim::Constant 
pnnx_13021 0 1 12800 value=-1 prim::Constant pnnx_13022 0 1 12801 value=64 pnnx.Attribute layers_dfe.4.residual_group.blocks.4.attn 0 1 relative_position_bias_table.131 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.131=(225,6)f32 pnnx.Attribute layers_dfe.4.residual_group.blocks.4.attn 0 1 relative_position_index.131 @relative_position_index=(64,64)i64 #relative_position_index.131=(64,64)i64 Tensor.view Tensor.view_1691 2 1 12785 12786 x_windows.131 $input=12785 $shape=12786 #12785=(1,6,6,8,8,192)f32 #x_windows.131=(36,8,8,192)f32 Tensor.view Tensor.view_1692 2 1 x_windows.131 12788 x6.59 $input=x_windows.131 $shape=12788 #x_windows.131=(36,8,8,192)f32 #x6.59=(36,64,192)f32 aten::size pnnx_13023 2 1 x6.59 12793 12809 #x6.59=(36,64,192)f32 prim::NumToTensor pnnx_13024 1 1 12809 B_.131 aten::Int pnnx_13025 1 1 B_.131 12811 aten::Int pnnx_13026 1 1 B_.131 12812 aten::size pnnx_13027 2 1 x6.59 12794 12813 #x6.59=(36,64,192)f32 prim::NumToTensor pnnx_13028 1 1 12813 N.131 aten::Int pnnx_13029 1 1 N.131 12815 aten::Int pnnx_13030 1 1 N.131 12816 aten::size pnnx_13031 2 1 x6.59 12795 12817 #x6.59=(36,64,192)f32 prim::NumToTensor pnnx_13032 1 1 12817 C.269 aten::Int pnnx_13033 1 1 C.269 12819 nn.Linear layers_dfe.4.residual_group.blocks.4.attn.qkv 1 1 x6.59 12820 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x6.59=(36,64,192)f32 #12820=(36,64,576)f32 aten::div pnnx_13034 3 1 C.269 12792 12791 12821 aten::Int pnnx_13035 1 1 12821 12822 prim::ListConstruct pnnx_13036 5 1 12812 12816 12796 12797 12822 12823 prim::Constant pnnx_13038 0 1 24120 value=2 prim::Constant pnnx_13039 0 1 24121 value=0 prim::Constant pnnx_13040 0 1 24122 value=3 prim::Constant pnnx_13041 0 1 24123 value=1 prim::ListConstruct pnnx_13042 5 1 24120 24121 24122 24123 12798 12825 Tensor.reshape Tensor.reshape_562 2 1 12820 12823 12824 $input=12820 $shape=12823 #12820=(36,64,576)f32 #12824=(36,64,3,6,32)f32 prim::Constant pnnx_13044 0 1 24124 value=0 prim::Constant pnnx_13045 0 1 24125 value=0 prim::Constant pnnx_13047 0 1 24126 value=0 prim::Constant pnnx_13048 0 1 24127 value=1 prim::Constant pnnx_13050 0 1 24128 value=0 prim::Constant pnnx_13051 0 1 24129 value=2 torch.permute torch.permute_2785 2 1 12824 12825 qkv1.59 $input=12824 $dims=12825 #12824=(36,64,3,6,32)f32 #qkv1.59=(3,36,6,64,32)f32 Tensor.select Tensor.select_842 3 1 qkv1.59 24124 24125 q.131 $input=qkv1.59 $dim=24124 $index=24125 #qkv1.59=(3,36,6,64,32)f32 #q.131=(36,6,64,32)f32 aten::mul pnnx_13053 2 1 q.131 12790 q1.59 #q.131=(36,6,64,32)f32 #q1.59=(36,6,64,32)f32 Tensor.select Tensor.select_843 3 1 qkv1.59 24126 24127 k.131 $input=qkv1.59 $dim=24126 $index=24127 #qkv1.59=(3,36,6,64,32)f32 #k.131=(36,6,64,32)f32 prim::Constant pnnx_13056 0 1 24130 value=-1 prim::ListConstruct pnnx_13057 1 1 24130 12833 Tensor.view Tensor.view_1693 2 1 relative_position_index.131 12833 12834 $input=relative_position_index.131 $shape=12833 #relative_position_index.131=(64,64)i64 #12834=(4096)i64 prim::ListConstruct pnnx_13059 1 1 12834 12835 #12834=(4096)i64 prim::Constant pnnx_13061 0 1 24131 value=64 prim::Constant pnnx_13062 0 1 24132 value=-1 prim::ListConstruct pnnx_13063 3 1 12801 24131 24132 12837 Tensor.index Tensor.index_390 2 1 relative_position_bias_table.131 12835 12836 $input=relative_position_bias_table.131 $expr=12835 #relative_position_bias_table.131=(225,6)f32 #12836=(4096,6)f32 prim::Constant pnnx_13065 0 1 24133 value=2 prim::Constant pnnx_13066 0 1 24134 value=0 prim::Constant pnnx_13067 0 1 24135 
value=1 prim::ListConstruct pnnx_13068 3 1 24133 24134 24135 12839 Tensor.view Tensor.view_1694 2 1 12836 12837 relative_position_bias.131 $input=12836 $shape=12837 #12836=(4096,6)f32 #relative_position_bias.131=(64,64,6)f32 prim::Constant pnnx_13072 0 1 24137 value=0 torch.permute torch.permute_2786 2 1 relative_position_bias.131 12839 12840 $input=relative_position_bias.131 $dims=12839 #relative_position_bias.131=(64,64,6)f32 #12840=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_195 1 1 12840 relative_position_bias1.59 memory_format=torch.contiguous_format $input=12840 #12840=(6,64,64)f32 #relative_position_bias1.59=(6,64,64)f32 prim::Constant pnnx_13074 0 1 24138 value=1 torch.transpose torch.transpose_3111 3 1 k.131 12799 12800 12831 $input=k.131 $dim0=12799 $dim1=12800 #k.131=(36,6,64,32)f32 #12831=(36,6,32,64)f32 torch.matmul torch.matmul_2332 2 1 q1.59 12831 attn.263 $input=q1.59 $other=12831 #q1.59=(36,6,64,32)f32 #12831=(36,6,32,64)f32 #attn.263=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3346 2 1 relative_position_bias1.59 24137 12842 $input=relative_position_bias1.59 $dim=24137 #relative_position_bias1.59=(6,64,64)f32 #12842=(1,6,64,64)f32 aten::add pnnx_13075 3 1 attn.263 12842 24138 input.291 #attn.263=(36,6,64,64)f32 #12842=(1,6,64,64)f32 #input.291=(36,6,64,64)f32 nn.Softmax layers_dfe.4.residual_group.blocks.4.attn.softmax 1 1 input.291 12844 dim=-1 #input.291=(36,6,64,64)f32 #12844=(36,6,64,64)f32 nn.Dropout layers_dfe.4.residual_group.blocks.4.attn.attn_drop 1 1 12844 12845 #12844=(36,6,64,64)f32 #12845=(36,6,64,64)f32 Tensor.select Tensor.select_844 3 1 qkv1.59 24128 24129 v.131 $input=qkv1.59 $dim=24128 $index=24129 #qkv1.59=(3,36,6,64,32)f32 #v.131=(36,6,64,32)f32 prim::Constant pnnx_13077 0 1 24139 value=1 prim::Constant pnnx_13078 0 1 24140 value=2 torch.matmul torch.matmul_2333 2 1 12845 v.131 12846 $input=12845 $other=v.131 #12845=(36,6,64,64)f32 #v.131=(36,6,64,32)f32 #12846=(36,6,64,32)f32 prim::ListConstruct pnnx_13080 3 1 12811 12815 12819 12848 torch.transpose torch.transpose_3112 3 1 12846 24139 24140 12847 $input=12846 $dim0=24139 $dim1=24140 #12846=(36,6,64,32)f32 #12847=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_563 2 1 12847 12848 input1.61 $input=12847 $shape=12848 #12847=(36,64,6,32)f32 #input1.61=(36,64,192)f32 nn.Linear layers_dfe.4.residual_group.blocks.4.attn.proj 1 1 input1.61 12850 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.61=(36,64,192)f32 #12850=(36,64,192)f32 nn.Dropout layers_dfe.4.residual_group.blocks.4.attn.proj_drop 1 1 12850 12851 #12850=(36,64,192)f32 #12851=(36,64,192)f32 prim::Constant pnnx_13082 0 1 24141 value=-1 prim::Constant pnnx_13083 0 1 24142 value=8 prim::Constant pnnx_13084 0 1 24143 value=8 prim::ListConstruct pnnx_13085 4 1 24141 24142 24143 12760 12852 prim::Constant pnnx_13087 0 1 24144 value=8 prim::Constant pnnx_13088 0 1 24145 value=trunc aten::div pnnx_13089 3 1 H0.1 24144 24145 12854 aten::Int pnnx_13090 1 1 12854 12855 prim::Constant pnnx_13091 0 1 24146 value=8 prim::Constant pnnx_13092 0 1 24147 value=trunc aten::div pnnx_13093 3 1 W0.1 24146 24147 12856 aten::Int pnnx_13094 1 1 12856 12857 prim::Constant pnnx_13095 0 1 24148 value=1 prim::Constant pnnx_13096 0 1 24149 value=8 prim::Constant pnnx_13097 0 1 24150 value=8 prim::Constant pnnx_13098 0 1 24151 value=-1 prim::ListConstruct pnnx_13099 6 1 24148 12855 12857 24149 24150 24151 12858 prim::Constant pnnx_13101 0 1 24152 value=0 prim::Constant pnnx_13102 0 1 24153 value=1 prim::Constant pnnx_13103 0 1 24154 
value=3 prim::Constant pnnx_13104 0 1 24155 value=2 prim::Constant pnnx_13105 0 1 24156 value=4 prim::Constant pnnx_13106 0 1 24157 value=5 prim::ListConstruct pnnx_13107 6 1 24152 24153 24154 24155 24156 24157 12860 Tensor.view Tensor.view_1695 2 1 12851 12852 windows.131 $input=12851 $shape=12852 #12851=(36,64,192)f32 #windows.131=(36,8,8,192)f32 Tensor.view Tensor.view_1696 2 1 windows.131 12858 x7.59 $input=windows.131 $shape=12858 #windows.131=(36,8,8,192)f32 #x7.59=(1,6,6,8,8,192)f32 prim::Constant pnnx_13111 0 1 24159 value=1 prim::Constant pnnx_13112 0 1 24160 value=-1 prim::ListConstruct pnnx_13113 4 1 24159 899 1139 24160 12863 torch.permute torch.permute_2787 2 1 x7.59 12860 12861 $input=x7.59 $dims=12860 #x7.59=(1,6,6,8,8,192)f32 #12861=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_196 1 1 12861 12862 memory_format=torch.contiguous_format $input=12861 #12861=(1,6,8,6,8,192)f32 #12862=(1,6,8,6,8,192)f32 aten::mul pnnx_13115 2 1 H0.1 W0.1 12865 aten::Int pnnx_13116 1 1 12865 12866 prim::ListConstruct pnnx_13117 3 1 12755 12866 12759 12867 prim::Constant pnnx_13119 0 1 12869 value=None prim::Constant pnnx_13120 0 1 24161 value=1 Tensor.view Tensor.view_1697 2 1 12862 12863 x8.59 $input=12862 $shape=12863 #12862=(1,6,8,6,8,192)f32 #x8.59=(1,48,48,192)f32 Tensor.view Tensor.view_1698 2 1 x8.59 12867 x9.59 $input=x8.59 $shape=12867 #x8.59=(1,48,48,192)f32 #x9.59=(1,2304,192)f32 aten::add pnnx_13121 3 1 12736 x9.59 24161 input.293 #12736=(1,2304,192)f32 #x9.59=(1,2304,192)f32 #input.293=(1,2304,192)f32 nn.LayerNorm layers_dfe.4.residual_group.blocks.4.norm2 1 1 input.293 12871 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.293=(1,2304,192)f32 #12871=(1,2304,192)f32 nn.Linear layers_dfe.4.residual_group.blocks.4.mlp.fc1 1 1 12871 12876 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #12871=(1,2304,192)f32 #12876=(1,2304,384)f32 nn.GELU layers_dfe.4.residual_group.blocks.4.mlp.act 1 1 12876 12877 #12876=(1,2304,384)f32 #12877=(1,2304,384)f32 nn.Dropout layers_dfe.4.residual_group.blocks.4.mlp.drop 1 1 12877 12878 #12877=(1,2304,384)f32 #12878=(1,2304,384)f32 nn.Linear layers_dfe.4.residual_group.blocks.4.mlp.fc2 1 1 12878 12879 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #12878=(1,2304,384)f32 #12879=(1,2304,192)f32 nn.Dropout layers_dfe.4.residual_group.blocks.4.mlp.drop 1 1 12879 12880 #12879=(1,2304,192)f32 #12880=(1,2304,192)f32 prim::Constant pnnx_13122 0 1 12881 value=None prim::Constant pnnx_13123 0 1 24162 value=1 aten::add pnnx_13124 3 1 input.293 12880 24162 12882 #input.293=(1,2304,192)f32 #12880=(1,2304,192)f32 #12882=(1,2304,192)f32 prim::Constant pnnx_13125 0 1 12883 value=trunc prim::Constant pnnx_13126 0 1 12884 value=8 prim::Constant pnnx_13127 0 1 12885 value=0 prim::Constant pnnx_13128 0 1 12886 value=2 prim::Constant pnnx_13129 0 1 12887 value=-4 prim::Constant pnnx_13130 0 1 12888 value=1 prim::Constant pnnx_13131 0 1 12889 value=3 prim::Constant pnnx_13132 0 1 12890 value=8 prim::Constant pnnx_13133 0 1 12891 value=4 prim::Constant pnnx_13134 0 1 12892 value=5 prim::Constant pnnx_13135 0 1 12893 value=-1 prim::Constant pnnx_13136 0 1 12894 value=64 pnnx.Attribute layers_dfe.4.residual_group.blocks.5 0 1 attn_mask.67 @attn_mask=(36,64,64)f32 #attn_mask.67=(36,64,64)f32 aten::size pnnx_13137 2 1 12882 12885 12901 #12882=(1,2304,192)f32 prim::NumToTensor pnnx_13138 1 1 12901 B.157 aten::Int pnnx_13139 1 1 B.157 12903 aten::Int pnnx_13140 1 
1 B.157 12904 aten::size pnnx_13141 2 1 12882 12886 12905 #12882=(1,2304,192)f32 prim::NumToTensor pnnx_13142 1 1 12905 C.271 aten::Int pnnx_13143 1 1 C.271 12907 aten::Int pnnx_13144 1 1 C.271 12908 aten::Int pnnx_13145 1 1 C.271 12909 aten::Int pnnx_13146 1 1 C.271 12910 nn.LayerNorm layers_dfe.4.residual_group.blocks.5.norm1 1 1 12882 12911 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #12882=(1,2304,192)f32 #12911=(1,2304,192)f32 prim::ListConstruct pnnx_13147 4 1 12904 896 1136 12910 12912 prim::Constant pnnx_13149 0 1 24163 value=-4 prim::ListConstruct pnnx_13150 2 1 12887 24163 12914 prim::Constant pnnx_13151 0 1 24164 value=2 prim::ListConstruct pnnx_13152 2 1 12888 24164 12915 Tensor.view Tensor.view_1699 2 1 12911 12912 x.133 $input=12911 $shape=12912 #12911=(1,2304,192)f32 #x.133=(1,48,48,192)f32 prim::Constant pnnx_13154 0 1 24165 value=0 torch.roll torch.roll_2484 3 1 x.133 12914 12915 x6.61 $input=x.133 $shifts=12914 $dims=12915 #x.133=(1,48,48,192)f32 #x6.61=(1,48,48,192)f32 aten::size pnnx_13155 2 1 x6.61 24165 12917 #x6.61=(1,48,48,192)f32 prim::NumToTensor pnnx_13156 1 1 12917 B1.61 aten::Int pnnx_13157 1 1 B1.61 12919 prim::Constant pnnx_13158 0 1 24166 value=1 aten::size pnnx_13159 2 1 x6.61 24166 12920 #x6.61=(1,48,48,192)f32 prim::NumToTensor pnnx_13160 1 1 12920 12921 prim::Constant pnnx_13161 0 1 24167 value=2 aten::size pnnx_13162 2 1 x6.61 24167 12922 #x6.61=(1,48,48,192)f32 prim::NumToTensor pnnx_13163 1 1 12922 12923 aten::size pnnx_13164 2 1 x6.61 12889 12924 #x6.61=(1,48,48,192)f32 prim::NumToTensor pnnx_13165 1 1 12924 C1.61 aten::Int pnnx_13166 1 1 C1.61 12926 aten::Int pnnx_13167 1 1 C1.61 12927 aten::div pnnx_13168 3 1 12921 12884 12883 12928 aten::Int pnnx_13169 1 1 12928 12929 prim::Constant pnnx_13170 0 1 24168 value=8 prim::Constant pnnx_13171 0 1 24169 value=trunc aten::div pnnx_13172 3 1 12923 24168 24169 12930 aten::Int pnnx_13173 1 1 12930 12931 prim::Constant pnnx_13174 0 1 24170 value=8 prim::ListConstruct pnnx_13175 6 1 12919 12929 12890 12931 24170 12927 12932 prim::Constant pnnx_13177 0 1 24171 value=0 prim::Constant pnnx_13178 0 1 24172 value=1 prim::Constant pnnx_13179 0 1 24173 value=3 prim::Constant pnnx_13180 0 1 24174 value=2 prim::ListConstruct pnnx_13181 6 1 24171 24172 24173 24174 12891 12892 12934 Tensor.view Tensor.view_1700 2 1 x6.61 12932 x7.61 $input=x6.61 $shape=12932 #x6.61=(1,48,48,192)f32 #x7.61=(1,6,8,6,8,192)f32 prim::Constant pnnx_13185 0 1 24176 value=8 prim::Constant pnnx_13186 0 1 24177 value=8 prim::ListConstruct pnnx_13187 4 1 12893 24176 24177 12926 12937 torch.permute torch.permute_2788 2 1 x7.61 12934 12935 $input=x7.61 $dims=12934 #x7.61=(1,6,8,6,8,192)f32 #12935=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_197 1 1 12935 12936 memory_format=torch.contiguous_format $input=12935 #12935=(1,6,6,8,8,192)f32 #12936=(1,6,6,8,8,192)f32 prim::Constant pnnx_13189 0 1 24178 value=-1 prim::ListConstruct pnnx_13190 3 1 24178 12894 12909 12939 prim::Constant pnnx_13192 0 1 12941 value=1.767767e-01 prim::Constant pnnx_13193 0 1 12942 value=trunc prim::Constant pnnx_13194 0 1 12943 value=6 prim::Constant pnnx_13195 0 1 12944 value=0 prim::Constant pnnx_13196 0 1 12945 value=1 prim::Constant pnnx_13197 0 1 12946 value=2 prim::Constant pnnx_13198 0 1 12947 value=3 prim::Constant pnnx_13199 0 1 12948 value=6 prim::Constant pnnx_13200 0 1 12949 value=4 prim::Constant pnnx_13201 0 1 12950 value=-2 prim::Constant pnnx_13202 0 1 12951 value=-1 prim::Constant pnnx_13203 0 1 
12952 value=64 pnnx.Attribute layers_dfe.4.residual_group.blocks.5.attn 0 1 relative_position_bias_table.133 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.133=(225,6)f32 pnnx.Attribute layers_dfe.4.residual_group.blocks.5.attn 0 1 relative_position_index.133 @relative_position_index=(64,64)i64 #relative_position_index.133=(64,64)i64 Tensor.view Tensor.view_1701 2 1 12936 12937 x_windows.133 $input=12936 $shape=12937 #12936=(1,6,6,8,8,192)f32 #x_windows.133=(36,8,8,192)f32 Tensor.view Tensor.view_1702 2 1 x_windows.133 12939 x8.61 $input=x_windows.133 $shape=12939 #x_windows.133=(36,8,8,192)f32 #x8.61=(36,64,192)f32 aten::size pnnx_13204 2 1 x8.61 12944 12960 #x8.61=(36,64,192)f32 prim::NumToTensor pnnx_13205 1 1 12960 B_.133 aten::Int pnnx_13206 1 1 B_.133 12962 aten::Int pnnx_13207 1 1 B_.133 12963 aten::size pnnx_13208 2 1 x8.61 12945 12964 #x8.61=(36,64,192)f32 prim::NumToTensor pnnx_13209 1 1 12964 N.133 aten::Int pnnx_13210 1 1 N.133 12966 aten::Int pnnx_13211 1 1 N.133 12967 aten::Int pnnx_13212 1 1 N.133 12968 aten::Int pnnx_13213 1 1 N.133 12969 aten::Int pnnx_13214 1 1 N.133 12970 aten::Int pnnx_13215 1 1 N.133 12971 aten::size pnnx_13216 2 1 x8.61 12946 12972 #x8.61=(36,64,192)f32 prim::NumToTensor pnnx_13217 1 1 12972 C.273 aten::Int pnnx_13218 1 1 C.273 12974 nn.Linear layers_dfe.4.residual_group.blocks.5.attn.qkv 1 1 x8.61 12975 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x8.61=(36,64,192)f32 #12975=(36,64,576)f32 aten::div pnnx_13219 3 1 C.273 12943 12942 12976 aten::Int pnnx_13220 1 1 12976 12977 prim::ListConstruct pnnx_13221 5 1 12963 12971 12947 12948 12977 12978 prim::Constant pnnx_13223 0 1 24179 value=2 prim::Constant pnnx_13224 0 1 24180 value=0 prim::Constant pnnx_13225 0 1 24181 value=3 prim::Constant pnnx_13226 0 1 24182 value=1 prim::ListConstruct pnnx_13227 5 1 24179 24180 24181 24182 12949 12980 Tensor.reshape Tensor.reshape_564 2 1 12975 12978 12979 $input=12975 $shape=12978 #12975=(36,64,576)f32 #12979=(36,64,3,6,32)f32 prim::Constant pnnx_13229 0 1 24183 value=0 prim::Constant pnnx_13230 0 1 24184 value=0 prim::Constant pnnx_13232 0 1 24185 value=0 prim::Constant pnnx_13233 0 1 24186 value=1 prim::Constant pnnx_13235 0 1 24187 value=0 prim::Constant pnnx_13236 0 1 24188 value=2 torch.permute torch.permute_2789 2 1 12979 12980 qkv1.61 $input=12979 $dims=12980 #12979=(36,64,3,6,32)f32 #qkv1.61=(3,36,6,64,32)f32 Tensor.select Tensor.select_845 3 1 qkv1.61 24183 24184 q.133 $input=qkv1.61 $dim=24183 $index=24184 #qkv1.61=(3,36,6,64,32)f32 #q.133=(36,6,64,32)f32 aten::mul pnnx_13238 2 1 q.133 12941 q1.61 #q.133=(36,6,64,32)f32 #q1.61=(36,6,64,32)f32 Tensor.select Tensor.select_846 3 1 qkv1.61 24185 24186 k.133 $input=qkv1.61 $dim=24185 $index=24186 #qkv1.61=(3,36,6,64,32)f32 #k.133=(36,6,64,32)f32 prim::Constant pnnx_13241 0 1 24189 value=-1 prim::ListConstruct pnnx_13242 1 1 24189 12988 Tensor.view Tensor.view_1703 2 1 relative_position_index.133 12988 12989 $input=relative_position_index.133 $shape=12988 #relative_position_index.133=(64,64)i64 #12989=(4096)i64 prim::ListConstruct pnnx_13244 1 1 12989 12990 #12989=(4096)i64 prim::Constant pnnx_13246 0 1 24190 value=64 prim::Constant pnnx_13247 0 1 24191 value=-1 prim::ListConstruct pnnx_13248 3 1 12952 24190 24191 12992 Tensor.index Tensor.index_391 2 1 relative_position_bias_table.133 12990 12991 $input=relative_position_bias_table.133 $expr=12990 #relative_position_bias_table.133=(225,6)f32 #12991=(4096,6)f32 prim::Constant pnnx_13250 0 1 24192 
value=2 prim::Constant pnnx_13251 0 1 24193 value=0 prim::Constant pnnx_13252 0 1 24194 value=1 prim::ListConstruct pnnx_13253 3 1 24192 24193 24194 12994 Tensor.view Tensor.view_1704 2 1 12991 12992 relative_position_bias.133 $input=12991 $shape=12992 #12991=(4096,6)f32 #relative_position_bias.133=(64,64,6)f32 prim::Constant pnnx_13257 0 1 24196 value=0 torch.permute torch.permute_2790 2 1 relative_position_bias.133 12994 12995 $input=relative_position_bias.133 $dims=12994 #relative_position_bias.133=(64,64,6)f32 #12995=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_198 1 1 12995 relative_position_bias1.61 memory_format=torch.contiguous_format $input=12995 #12995=(6,64,64)f32 #relative_position_bias1.61=(6,64,64)f32 prim::Constant pnnx_13259 0 1 24197 value=1 torch.transpose torch.transpose_3113 3 1 k.133 12950 12951 12986 $input=k.133 $dim0=12950 $dim1=12951 #k.133=(36,6,64,32)f32 #12986=(36,6,32,64)f32 torch.matmul torch.matmul_2334 2 1 q1.61 12986 attn.267 $input=q1.61 $other=12986 #q1.61=(36,6,64,32)f32 #12986=(36,6,32,64)f32 #attn.267=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3347 2 1 relative_position_bias1.61 24196 12997 $input=relative_position_bias1.61 $dim=24196 #relative_position_bias1.61=(6,64,64)f32 #12997=(1,6,64,64)f32 aten::add pnnx_13260 3 1 attn.267 12997 24197 attn2.31 #attn.267=(36,6,64,64)f32 #12997=(1,6,64,64)f32 #attn2.31=(36,6,64,64)f32 prim::Constant pnnx_13261 0 1 24198 value=0 aten::size pnnx_13262 2 1 attn_mask.67 24198 12999 #attn_mask.67=(36,64,64)f32 prim::NumToTensor pnnx_13263 1 1 12999 other.67 aten::Int pnnx_13264 1 1 other.67 13001 prim::Constant pnnx_13265 0 1 24199 value=trunc aten::div pnnx_13266 3 1 B_.133 other.67 24199 13002 aten::Int pnnx_13267 1 1 13002 13003 prim::Constant pnnx_13268 0 1 24200 value=6 prim::ListConstruct pnnx_13269 5 1 13003 13001 24200 12970 12969 13004 prim::Constant pnnx_13271 0 1 24201 value=1 prim::Constant pnnx_13273 0 1 24202 value=0 prim::Constant pnnx_13275 0 1 24203 value=1 Tensor.view Tensor.view_1705 2 1 attn2.31 13004 13005 $input=attn2.31 $shape=13004 #attn2.31=(36,6,64,64)f32 #13005=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3348 2 1 attn_mask.67 24201 13006 $input=attn_mask.67 $dim=24201 #attn_mask.67=(36,64,64)f32 #13006=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3349 2 1 13006 24202 13007 $input=13006 $dim=24202 #13006=(36,1,64,64)f32 #13007=(1,36,1,64,64)f32 aten::add pnnx_13276 3 1 13005 13007 24203 attn3.31 #13005=(1,36,6,64,64)f32 #13007=(1,36,1,64,64)f32 #attn3.31=(1,36,6,64,64)f32 prim::Constant pnnx_13277 0 1 24204 value=-1 prim::Constant pnnx_13278 0 1 24205 value=6 prim::ListConstruct pnnx_13279 4 1 24204 24205 12968 12967 13009 Tensor.view Tensor.view_1706 2 1 attn3.31 13009 input.295 $input=attn3.31 $shape=13009 #attn3.31=(1,36,6,64,64)f32 #input.295=(36,6,64,64)f32 nn.Softmax layers_dfe.4.residual_group.blocks.5.attn.softmax 1 1 input.295 13011 dim=-1 #input.295=(36,6,64,64)f32 #13011=(36,6,64,64)f32 nn.Dropout layers_dfe.4.residual_group.blocks.5.attn.attn_drop 1 1 13011 13012 #13011=(36,6,64,64)f32 #13012=(36,6,64,64)f32 Tensor.select Tensor.select_847 3 1 qkv1.61 24187 24188 v.133 $input=qkv1.61 $dim=24187 $index=24188 #qkv1.61=(3,36,6,64,32)f32 #v.133=(36,6,64,32)f32 prim::Constant pnnx_13282 0 1 24206 value=1 prim::Constant pnnx_13283 0 1 24207 value=2 torch.matmul torch.matmul_2335 2 1 13012 v.133 13013 $input=13012 $other=v.133 #13012=(36,6,64,64)f32 #v.133=(36,6,64,32)f32 #13013=(36,6,64,32)f32 prim::ListConstruct pnnx_13285 3 1 12962 12966 12974 13015 torch.transpose 
torch.transpose_3114 3 1 13013 24206 24207 13014 $input=13013 $dim0=24206 $dim1=24207 #13013=(36,6,64,32)f32 #13014=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_565 2 1 13014 13015 input1.63 $input=13014 $shape=13015 #13014=(36,64,6,32)f32 #input1.63=(36,64,192)f32 nn.Linear layers_dfe.4.residual_group.blocks.5.attn.proj 1 1 input1.63 13017 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.63=(36,64,192)f32 #13017=(36,64,192)f32 nn.Dropout layers_dfe.4.residual_group.blocks.5.attn.proj_drop 1 1 13017 13018 #13017=(36,64,192)f32 #13018=(36,64,192)f32 prim::Constant pnnx_13287 0 1 24208 value=-1 prim::Constant pnnx_13288 0 1 24209 value=8 prim::Constant pnnx_13289 0 1 24210 value=8 prim::ListConstruct pnnx_13290 4 1 24208 24209 24210 12908 13019 prim::Constant pnnx_13292 0 1 24211 value=8 prim::Constant pnnx_13293 0 1 24212 value=trunc aten::div pnnx_13294 3 1 H0.1 24211 24212 13021 aten::Int pnnx_13295 1 1 13021 13022 prim::Constant pnnx_13296 0 1 24213 value=8 prim::Constant pnnx_13297 0 1 24214 value=trunc aten::div pnnx_13298 3 1 W0.1 24213 24214 13023 aten::Int pnnx_13299 1 1 13023 13024 prim::Constant pnnx_13300 0 1 24215 value=1 prim::Constant pnnx_13301 0 1 24216 value=8 prim::Constant pnnx_13302 0 1 24217 value=8 prim::Constant pnnx_13303 0 1 24218 value=-1 prim::ListConstruct pnnx_13304 6 1 24215 13022 13024 24216 24217 24218 13025 prim::Constant pnnx_13306 0 1 24219 value=0 prim::Constant pnnx_13307 0 1 24220 value=1 prim::Constant pnnx_13308 0 1 24221 value=3 prim::Constant pnnx_13309 0 1 24222 value=2 prim::Constant pnnx_13310 0 1 24223 value=4 prim::Constant pnnx_13311 0 1 24224 value=5 prim::ListConstruct pnnx_13312 6 1 24219 24220 24221 24222 24223 24224 13027 Tensor.view Tensor.view_1707 2 1 13018 13019 windows.133 $input=13018 $shape=13019 #13018=(36,64,192)f32 #windows.133=(36,8,8,192)f32 Tensor.view Tensor.view_1708 2 1 windows.133 13025 x9.61 $input=windows.133 $shape=13025 #windows.133=(36,8,8,192)f32 #x9.61=(1,6,6,8,8,192)f32 prim::Constant pnnx_13316 0 1 24226 value=1 prim::Constant pnnx_13317 0 1 24227 value=-1 prim::ListConstruct pnnx_13318 4 1 24226 893 1133 24227 13030 torch.permute torch.permute_2791 2 1 x9.61 13027 13028 $input=x9.61 $dims=13027 #x9.61=(1,6,6,8,8,192)f32 #13028=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_199 1 1 13028 13029 memory_format=torch.contiguous_format $input=13028 #13028=(1,6,8,6,8,192)f32 #13029=(1,6,8,6,8,192)f32 prim::Constant pnnx_13320 0 1 24228 value=4 prim::Constant pnnx_13321 0 1 24229 value=4 prim::ListConstruct pnnx_13322 2 1 24228 24229 13032 prim::Constant pnnx_13323 0 1 24230 value=1 prim::Constant pnnx_13324 0 1 24231 value=2 prim::ListConstruct pnnx_13325 2 1 24230 24231 13033 Tensor.view Tensor.view_1709 2 1 13029 13030 shifted_x.67 $input=13029 $shape=13030 #13029=(1,6,8,6,8,192)f32 #shifted_x.67=(1,48,48,192)f32 aten::mul pnnx_13327 2 1 H0.1 W0.1 13035 aten::Int pnnx_13328 1 1 13035 13036 prim::ListConstruct pnnx_13329 3 1 12903 13036 12907 13037 prim::Constant pnnx_13331 0 1 13039 value=None prim::Constant pnnx_13332 0 1 24232 value=1 torch.roll torch.roll_2485 3 1 shifted_x.67 13032 13033 x10.31 $input=shifted_x.67 $shifts=13032 $dims=13033 #shifted_x.67=(1,48,48,192)f32 #x10.31=(1,48,48,192)f32 Tensor.view Tensor.view_1710 2 1 x10.31 13037 x11.31 $input=x10.31 $shape=13037 #x10.31=(1,48,48,192)f32 #x11.31=(1,2304,192)f32 aten::add pnnx_13333 3 1 12882 x11.31 24232 input.297 #12882=(1,2304,192)f32 #x11.31=(1,2304,192)f32 #input.297=(1,2304,192)f32 nn.LayerNorm 
layers_dfe.4.residual_group.blocks.5.norm2 1 1 input.297 13041 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.297=(1,2304,192)f32 #13041=(1,2304,192)f32 nn.Linear layers_dfe.4.residual_group.blocks.5.mlp.fc1 1 1 13041 13046 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #13041=(1,2304,192)f32 #13046=(1,2304,384)f32 nn.GELU layers_dfe.4.residual_group.blocks.5.mlp.act 1 1 13046 13047 #13046=(1,2304,384)f32 #13047=(1,2304,384)f32 nn.Dropout layers_dfe.4.residual_group.blocks.5.mlp.drop 1 1 13047 13048 #13047=(1,2304,384)f32 #13048=(1,2304,384)f32 nn.Linear layers_dfe.4.residual_group.blocks.5.mlp.fc2 1 1 13048 13049 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #13048=(1,2304,384)f32 #13049=(1,2304,192)f32 nn.Dropout layers_dfe.4.residual_group.blocks.5.mlp.drop 1 1 13049 13050 #13049=(1,2304,192)f32 #13050=(1,2304,192)f32 prim::Constant pnnx_13334 0 1 13051 value=None prim::Constant pnnx_13335 0 1 24233 value=1 aten::add pnnx_13336 3 1 input.297 13050 24233 13052 #input.297=(1,2304,192)f32 #13050=(1,2304,192)f32 #13052=(1,2304,192)f32 prim::Constant pnnx_13337 0 1 13053 value=0 prim::Constant pnnx_13338 0 1 13054 value=1 prim::Constant pnnx_13339 0 1 13055 value=2 prim::Constant pnnx_13340 0 1 13056 value=192 aten::size pnnx_13341 2 1 13052 13053 13057 #13052=(1,2304,192)f32 prim::NumToTensor pnnx_13342 1 1 13057 B.159 aten::Int pnnx_13343 1 1 B.159 13059 prim::ListConstruct pnnx_13345 4 1 13059 13056 890 1130 13061 torch.transpose torch.transpose_3115 3 1 13052 13054 13055 13060 $input=13052 $dim0=13054 $dim1=13055 #13052=(1,2304,192)f32 #13060=(1,192,2304)f32 Tensor.view Tensor.view_1711 2 1 13060 13061 input.299 $input=13060 $shape=13061 #13060=(1,192,2304)f32 #input.299=(1,192,48,48)f32 nn.Conv2d layers_dfe.4.conv 1 1 input.299 13063 bias=True dilation=(1,1) groups=1 in_channels=192 kernel_size=(3,3) out_channels=192 padding=(1,1) padding_mode=zeros stride=(1,1) @bias=(192)f32 @weight=(192,192,3,3)f32 #input.299=(1,192,48,48)f32 #13063=(1,192,48,48)f32 prim::Constant pnnx_13347 0 1 13064 value=-1 prim::Constant pnnx_13348 0 1 13065 value=2 prim::Constant pnnx_13349 0 1 13066 value=1 prim::Constant pnnx_13351 0 1 24234 value=2 torch.flatten torch.flatten_2195 3 1 13063 13065 13064 13067 $input=13063 $start_dim=13065 $end_dim=13064 #13063=(1,192,48,48)f32 #13067=(1,192,2304)f32 torch.transpose torch.transpose_3116 3 1 13067 13066 24234 13068 $input=13067 $dim0=13066 $dim1=24234 #13067=(1,192,2304)f32 #13068=(1,2304,192)f32 aten::add pnnx_13353 3 1 13068 12087 12088 13069 #13068=(1,2304,192)f32 #12087=(1,2304,192)f32 #13069=(1,2304,192)f32 prim::Constant pnnx_13354 0 1 13070 value=1 prim::Constant pnnx_13355 0 1 13087 value=trunc prim::Constant pnnx_13356 0 1 13088 value=8 prim::Constant pnnx_13357 0 1 13089 value=0 prim::Constant pnnx_13358 0 1 13090 value=2 prim::Constant pnnx_13359 0 1 13091 value=1 prim::Constant pnnx_13360 0 1 13092 value=3 prim::Constant pnnx_13361 0 1 13093 value=8 prim::Constant pnnx_13362 0 1 13094 value=4 prim::Constant pnnx_13363 0 1 13095 value=5 prim::Constant pnnx_13364 0 1 13096 value=-1 prim::Constant pnnx_13365 0 1 13097 value=64 aten::size pnnx_13366 2 1 13069 13089 13103 #13069=(1,2304,192)f32 prim::NumToTensor pnnx_13367 1 1 13103 B.163 aten::Int pnnx_13368 1 1 B.163 13105 aten::Int pnnx_13369 1 1 B.163 13106 aten::size pnnx_13370 2 1 13069 13090 13107 #13069=(1,2304,192)f32 prim::NumToTensor pnnx_13371 1 1 13107 C.275 aten::Int 
pnnx_13372 1 1 C.275 13109 aten::Int pnnx_13373 1 1 C.275 13110 aten::Int pnnx_13374 1 1 C.275 13111 aten::Int pnnx_13375 1 1 C.275 13112 nn.LayerNorm layers_dfe.5.residual_group.blocks.0.norm1 1 1 13069 13113 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #13069=(1,2304,192)f32 #13113=(1,2304,192)f32 prim::ListConstruct pnnx_13376 4 1 13106 887 1127 13112 13114 prim::Constant pnnx_13378 0 1 24235 value=0 Tensor.view Tensor.view_1712 2 1 13113 13114 x.135 $input=13113 $shape=13114 #13113=(1,2304,192)f32 #x.135=(1,48,48,192)f32 aten::size pnnx_13379 2 1 x.135 24235 13116 #x.135=(1,48,48,192)f32 prim::NumToTensor pnnx_13380 1 1 13116 B1.2 aten::Int pnnx_13381 1 1 B1.2 13118 aten::size pnnx_13382 2 1 x.135 13091 13119 #x.135=(1,48,48,192)f32 prim::NumToTensor pnnx_13383 1 1 13119 13120 prim::Constant pnnx_13384 0 1 24236 value=2 aten::size pnnx_13385 2 1 x.135 24236 13121 #x.135=(1,48,48,192)f32 prim::NumToTensor pnnx_13386 1 1 13121 13122 aten::size pnnx_13387 2 1 x.135 13092 13123 #x.135=(1,48,48,192)f32 prim::NumToTensor pnnx_13388 1 1 13123 C1.2 aten::Int pnnx_13389 1 1 C1.2 13125 aten::Int pnnx_13390 1 1 C1.2 13126 aten::div pnnx_13391 3 1 13120 13088 13087 13127 aten::Int pnnx_13392 1 1 13127 13128 prim::Constant pnnx_13393 0 1 24237 value=8 prim::Constant pnnx_13394 0 1 24238 value=trunc aten::div pnnx_13395 3 1 13122 24237 24238 13129 aten::Int pnnx_13396 1 1 13129 13130 prim::Constant pnnx_13397 0 1 24239 value=8 prim::ListConstruct pnnx_13398 6 1 13118 13128 13093 13130 24239 13126 13131 prim::Constant pnnx_13400 0 1 24240 value=0 prim::Constant pnnx_13401 0 1 24241 value=1 prim::Constant pnnx_13402 0 1 24242 value=3 prim::Constant pnnx_13403 0 1 24243 value=2 prim::ListConstruct pnnx_13404 6 1 24240 24241 24242 24243 13094 13095 13133 Tensor.view Tensor.view_1713 2 1 x.135 13131 x5.69 $input=x.135 $shape=13131 #x.135=(1,48,48,192)f32 #x5.69=(1,6,8,6,8,192)f32 prim::Constant pnnx_13408 0 1 24245 value=8 prim::Constant pnnx_13409 0 1 24246 value=8 prim::ListConstruct pnnx_13410 4 1 13096 24245 24246 13125 13136 torch.permute torch.permute_2792 2 1 x5.69 13133 13134 $input=x5.69 $dims=13133 #x5.69=(1,6,8,6,8,192)f32 #13134=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_200 1 1 13134 13135 memory_format=torch.contiguous_format $input=13134 #13134=(1,6,6,8,8,192)f32 #13135=(1,6,6,8,8,192)f32 prim::Constant pnnx_13412 0 1 24247 value=-1 prim::ListConstruct pnnx_13413 3 1 24247 13097 13111 13138 prim::Constant pnnx_13415 0 1 13140 value=1.767767e-01 prim::Constant pnnx_13416 0 1 13141 value=trunc prim::Constant pnnx_13417 0 1 13142 value=6 prim::Constant pnnx_13418 0 1 13143 value=0 prim::Constant pnnx_13419 0 1 13144 value=1 prim::Constant pnnx_13420 0 1 13145 value=2 prim::Constant pnnx_13421 0 1 13146 value=3 prim::Constant pnnx_13422 0 1 13147 value=6 prim::Constant pnnx_13423 0 1 13148 value=4 prim::Constant pnnx_13424 0 1 13149 value=-2 prim::Constant pnnx_13425 0 1 13150 value=-1 prim::Constant pnnx_13426 0 1 13151 value=64 pnnx.Attribute layers_dfe.5.residual_group.blocks.0.attn 0 1 relative_position_bias_table.135 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.135=(225,6)f32 pnnx.Attribute layers_dfe.5.residual_group.blocks.0.attn 0 1 relative_position_index.135 @relative_position_index=(64,64)i64 #relative_position_index.135=(64,64)i64 Tensor.view Tensor.view_1714 2 1 13135 13136 x_windows.135 $input=13135 $shape=13136 #13135=(1,6,6,8,8,192)f32 #x_windows.135=(36,8,8,192)f32 Tensor.view 
Tensor.view_1715 2 1 x_windows.135 13138 x6.2 $input=x_windows.135 $shape=13138 #x_windows.135=(36,8,8,192)f32 #x6.2=(36,64,192)f32 aten::size pnnx_13427 2 1 x6.2 13143 13159 #x6.2=(36,64,192)f32 prim::NumToTensor pnnx_13428 1 1 13159 B_.135 aten::Int pnnx_13429 1 1 B_.135 13161 aten::Int pnnx_13430 1 1 B_.135 13162 aten::size pnnx_13431 2 1 x6.2 13144 13163 #x6.2=(36,64,192)f32 prim::NumToTensor pnnx_13432 1 1 13163 N.135 aten::Int pnnx_13433 1 1 N.135 13165 aten::Int pnnx_13434 1 1 N.135 13166 aten::size pnnx_13435 2 1 x6.2 13145 13167 #x6.2=(36,64,192)f32 prim::NumToTensor pnnx_13436 1 1 13167 C.277 aten::Int pnnx_13437 1 1 C.277 13169 nn.Linear layers_dfe.5.residual_group.blocks.0.attn.qkv 1 1 x6.2 13170 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x6.2=(36,64,192)f32 #13170=(36,64,576)f32 aten::div pnnx_13438 3 1 C.277 13142 13141 13171 aten::Int pnnx_13439 1 1 13171 13172 prim::ListConstruct pnnx_13440 5 1 13162 13166 13146 13147 13172 13173 prim::Constant pnnx_13442 0 1 24248 value=2 prim::Constant pnnx_13443 0 1 24249 value=0 prim::Constant pnnx_13444 0 1 24250 value=3 prim::Constant pnnx_13445 0 1 24251 value=1 prim::ListConstruct pnnx_13446 5 1 24248 24249 24250 24251 13148 13175 Tensor.reshape Tensor.reshape_566 2 1 13170 13173 13174 $input=13170 $shape=13173 #13170=(36,64,576)f32 #13174=(36,64,3,6,32)f32 prim::Constant pnnx_13448 0 1 24252 value=0 prim::Constant pnnx_13449 0 1 24253 value=0 prim::Constant pnnx_13451 0 1 24254 value=0 prim::Constant pnnx_13452 0 1 24255 value=1 prim::Constant pnnx_13454 0 1 24256 value=0 prim::Constant pnnx_13455 0 1 24257 value=2 torch.permute torch.permute_2793 2 1 13174 13175 qkv1.2 $input=13174 $dims=13175 #13174=(36,64,3,6,32)f32 #qkv1.2=(3,36,6,64,32)f32 Tensor.select Tensor.select_848 3 1 qkv1.2 24252 24253 q.135 $input=qkv1.2 $dim=24252 $index=24253 #qkv1.2=(3,36,6,64,32)f32 #q.135=(36,6,64,32)f32 aten::mul pnnx_13457 2 1 q.135 13140 q1.2 #q.135=(36,6,64,32)f32 #q1.2=(36,6,64,32)f32 Tensor.select Tensor.select_849 3 1 qkv1.2 24254 24255 k.135 $input=qkv1.2 $dim=24254 $index=24255 #qkv1.2=(3,36,6,64,32)f32 #k.135=(36,6,64,32)f32 prim::Constant pnnx_13460 0 1 24258 value=-1 prim::ListConstruct pnnx_13461 1 1 24258 13183 Tensor.view Tensor.view_1716 2 1 relative_position_index.135 13183 13184 $input=relative_position_index.135 $shape=13183 #relative_position_index.135=(64,64)i64 #13184=(4096)i64 prim::ListConstruct pnnx_13463 1 1 13184 13185 #13184=(4096)i64 prim::Constant pnnx_13465 0 1 24259 value=64 prim::Constant pnnx_13466 0 1 24260 value=-1 prim::ListConstruct pnnx_13467 3 1 13151 24259 24260 13187 Tensor.index Tensor.index_392 2 1 relative_position_bias_table.135 13185 13186 $input=relative_position_bias_table.135 $expr=13185 #relative_position_bias_table.135=(225,6)f32 #13186=(4096,6)f32 prim::Constant pnnx_13469 0 1 24261 value=2 prim::Constant pnnx_13470 0 1 24262 value=0 prim::Constant pnnx_13471 0 1 24263 value=1 prim::ListConstruct pnnx_13472 3 1 24261 24262 24263 13189 Tensor.view Tensor.view_1717 2 1 13186 13187 relative_position_bias.135 $input=13186 $shape=13187 #13186=(4096,6)f32 #relative_position_bias.135=(64,64,6)f32 prim::Constant pnnx_13476 0 1 24265 value=0 torch.permute torch.permute_2794 2 1 relative_position_bias.135 13189 13190 $input=relative_position_bias.135 $dims=13189 #relative_position_bias.135=(64,64,6)f32 #13190=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_201 1 1 13190 relative_position_bias1.2 memory_format=torch.contiguous_format $input=13190 #13190=(6,64,64)f32 
#relative_position_bias1.2=(6,64,64)f32 prim::Constant pnnx_13478 0 1 24266 value=1 torch.transpose torch.transpose_3117 3 1 k.135 13149 13150 13181 $input=k.135 $dim0=13149 $dim1=13150 #k.135=(36,6,64,32)f32 #13181=(36,6,32,64)f32 torch.matmul torch.matmul_2336 2 1 q1.2 13181 attn.271 $input=q1.2 $other=13181 #q1.2=(36,6,64,32)f32 #13181=(36,6,32,64)f32 #attn.271=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3350 2 1 relative_position_bias1.2 24265 13192 $input=relative_position_bias1.2 $dim=24265 #relative_position_bias1.2=(6,64,64)f32 #13192=(1,6,64,64)f32 aten::add pnnx_13479 3 1 attn.271 13192 24266 input.307 #attn.271=(36,6,64,64)f32 #13192=(1,6,64,64)f32 #input.307=(36,6,64,64)f32 nn.Softmax layers_dfe.5.residual_group.blocks.0.attn.softmax 1 1 input.307 13194 dim=-1 #input.307=(36,6,64,64)f32 #13194=(36,6,64,64)f32 nn.Dropout layers_dfe.5.residual_group.blocks.0.attn.attn_drop 1 1 13194 13195 #13194=(36,6,64,64)f32 #13195=(36,6,64,64)f32 Tensor.select Tensor.select_850 3 1 qkv1.2 24256 24257 v.135 $input=qkv1.2 $dim=24256 $index=24257 #qkv1.2=(3,36,6,64,32)f32 #v.135=(36,6,64,32)f32 prim::Constant pnnx_13481 0 1 24267 value=1 prim::Constant pnnx_13482 0 1 24268 value=2 torch.matmul torch.matmul_2337 2 1 13195 v.135 13196 $input=13195 $other=v.135 #13195=(36,6,64,64)f32 #v.135=(36,6,64,32)f32 #13196=(36,6,64,32)f32 prim::ListConstruct pnnx_13484 3 1 13161 13165 13169 13198 torch.transpose torch.transpose_3118 3 1 13196 24267 24268 13197 $input=13196 $dim0=24267 $dim1=24268 #13196=(36,6,64,32)f32 #13197=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_567 2 1 13197 13198 input1.2 $input=13197 $shape=13198 #13197=(36,64,6,32)f32 #input1.2=(36,64,192)f32 nn.Linear layers_dfe.5.residual_group.blocks.0.attn.proj 1 1 input1.2 13200 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.2=(36,64,192)f32 #13200=(36,64,192)f32 nn.Dropout layers_dfe.5.residual_group.blocks.0.attn.proj_drop 1 1 13200 13201 #13200=(36,64,192)f32 #13201=(36,64,192)f32 prim::Constant pnnx_13486 0 1 24269 value=-1 prim::Constant pnnx_13487 0 1 24270 value=8 prim::Constant pnnx_13488 0 1 24271 value=8 prim::ListConstruct pnnx_13489 4 1 24269 24270 24271 13110 13202 prim::Constant pnnx_13491 0 1 24272 value=8 prim::Constant pnnx_13492 0 1 24273 value=trunc aten::div pnnx_13493 3 1 H0.1 24272 24273 13204 aten::Int pnnx_13494 1 1 13204 13205 prim::Constant pnnx_13495 0 1 24274 value=8 prim::Constant pnnx_13496 0 1 24275 value=trunc aten::div pnnx_13497 3 1 W0.1 24274 24275 13206 aten::Int pnnx_13498 1 1 13206 13207 prim::Constant pnnx_13499 0 1 24276 value=1 prim::Constant pnnx_13500 0 1 24277 value=8 prim::Constant pnnx_13501 0 1 24278 value=8 prim::Constant pnnx_13502 0 1 24279 value=-1 prim::ListConstruct pnnx_13503 6 1 24276 13205 13207 24277 24278 24279 13208 prim::Constant pnnx_13505 0 1 24280 value=0 prim::Constant pnnx_13506 0 1 24281 value=1 prim::Constant pnnx_13507 0 1 24282 value=3 prim::Constant pnnx_13508 0 1 24283 value=2 prim::Constant pnnx_13509 0 1 24284 value=4 prim::Constant pnnx_13510 0 1 24285 value=5 prim::ListConstruct pnnx_13511 6 1 24280 24281 24282 24283 24284 24285 13210 Tensor.view Tensor.view_1718 2 1 13201 13202 windows.135 $input=13201 $shape=13202 #13201=(36,64,192)f32 #windows.135=(36,8,8,192)f32 Tensor.view Tensor.view_1719 2 1 windows.135 13208 x7.2 $input=windows.135 $shape=13208 #windows.135=(36,8,8,192)f32 #x7.2=(1,6,6,8,8,192)f32 prim::Constant pnnx_13515 0 1 24287 value=1 prim::Constant pnnx_13516 0 1 24288 value=-1 prim::ListConstruct pnnx_13517 
4 1 24287 884 1124 24288 13213 torch.permute torch.permute_2795 2 1 x7.2 13210 13211 $input=x7.2 $dims=13210 #x7.2=(1,6,6,8,8,192)f32 #13211=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_202 1 1 13211 13212 memory_format=torch.contiguous_format $input=13211 #13211=(1,6,8,6,8,192)f32 #13212=(1,6,8,6,8,192)f32 aten::mul pnnx_13519 2 1 H0.1 W0.1 13215 aten::Int pnnx_13520 1 1 13215 13216 prim::ListConstruct pnnx_13521 3 1 13105 13216 13109 13217 prim::Constant pnnx_13523 0 1 13219 value=None prim::Constant pnnx_13524 0 1 24289 value=1 Tensor.view Tensor.view_1720 2 1 13212 13213 x8.2 $input=13212 $shape=13213 #13212=(1,6,8,6,8,192)f32 #x8.2=(1,48,48,192)f32 Tensor.view Tensor.view_1721 2 1 x8.2 13217 x9.2 $input=x8.2 $shape=13217 #x8.2=(1,48,48,192)f32 #x9.2=(1,2304,192)f32 aten::add pnnx_13525 3 1 13069 x9.2 24289 input.309 #13069=(1,2304,192)f32 #x9.2=(1,2304,192)f32 #input.309=(1,2304,192)f32 nn.LayerNorm layers_dfe.5.residual_group.blocks.0.norm2 1 1 input.309 13221 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.309=(1,2304,192)f32 #13221=(1,2304,192)f32 nn.Linear layers_dfe.5.residual_group.blocks.0.mlp.fc1 1 1 13221 13226 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #13221=(1,2304,192)f32 #13226=(1,2304,384)f32 nn.GELU layers_dfe.5.residual_group.blocks.0.mlp.act 1 1 13226 13227 #13226=(1,2304,384)f32 #13227=(1,2304,384)f32 nn.Dropout layers_dfe.5.residual_group.blocks.0.mlp.drop 1 1 13227 13228 #13227=(1,2304,384)f32 #13228=(1,2304,384)f32 nn.Linear layers_dfe.5.residual_group.blocks.0.mlp.fc2 1 1 13228 13229 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #13228=(1,2304,384)f32 #13229=(1,2304,192)f32 nn.Dropout layers_dfe.5.residual_group.blocks.0.mlp.drop 1 1 13229 13230 #13229=(1,2304,192)f32 #13230=(1,2304,192)f32 prim::Constant pnnx_13526 0 1 13231 value=None prim::Constant pnnx_13527 0 1 24290 value=1 aten::add pnnx_13528 3 1 input.309 13230 24290 13232 #input.309=(1,2304,192)f32 #13230=(1,2304,192)f32 #13232=(1,2304,192)f32 prim::Constant pnnx_13529 0 1 13233 value=trunc prim::Constant pnnx_13530 0 1 13234 value=8 prim::Constant pnnx_13531 0 1 13235 value=0 prim::Constant pnnx_13532 0 1 13236 value=2 prim::Constant pnnx_13533 0 1 13237 value=-4 prim::Constant pnnx_13534 0 1 13238 value=1 prim::Constant pnnx_13535 0 1 13239 value=3 prim::Constant pnnx_13536 0 1 13240 value=8 prim::Constant pnnx_13537 0 1 13241 value=4 prim::Constant pnnx_13538 0 1 13242 value=5 prim::Constant pnnx_13539 0 1 13243 value=-1 prim::Constant pnnx_13540 0 1 13244 value=64 pnnx.Attribute layers_dfe.5.residual_group.blocks.1 0 1 attn_mask.69 @attn_mask=(36,64,64)f32 #attn_mask.69=(36,64,64)f32 aten::size pnnx_13541 2 1 13232 13235 13251 #13232=(1,2304,192)f32 prim::NumToTensor pnnx_13542 1 1 13251 B.165 aten::Int pnnx_13543 1 1 B.165 13253 aten::Int pnnx_13544 1 1 B.165 13254 aten::size pnnx_13545 2 1 13232 13236 13255 #13232=(1,2304,192)f32 prim::NumToTensor pnnx_13546 1 1 13255 C.279 aten::Int pnnx_13547 1 1 C.279 13257 aten::Int pnnx_13548 1 1 C.279 13258 aten::Int pnnx_13549 1 1 C.279 13259 aten::Int pnnx_13550 1 1 C.279 13260 nn.LayerNorm layers_dfe.5.residual_group.blocks.1.norm1 1 1 13232 13261 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #13232=(1,2304,192)f32 #13261=(1,2304,192)f32 prim::ListConstruct pnnx_13551 4 1 13254 881 1121 13260 13262 prim::Constant pnnx_13553 0 1 24291 value=-4 prim::ListConstruct pnnx_13554 
2 1 13237 24291 13264 prim::Constant pnnx_13555 0 1 24292 value=2 prim::ListConstruct pnnx_13556 2 1 13238 24292 13265 Tensor.view Tensor.view_1722 2 1 13261 13262 x.137 $input=13261 $shape=13262 #13261=(1,2304,192)f32 #x.137=(1,48,48,192)f32 prim::Constant pnnx_13558 0 1 24293 value=0 torch.roll torch.roll_2486 3 1 x.137 13264 13265 x6.4 $input=x.137 $shifts=13264 $dims=13265 #x.137=(1,48,48,192)f32 #x6.4=(1,48,48,192)f32 aten::size pnnx_13559 2 1 x6.4 24293 13267 #x6.4=(1,48,48,192)f32 prim::NumToTensor pnnx_13560 1 1 13267 B1.4 aten::Int pnnx_13561 1 1 B1.4 13269 prim::Constant pnnx_13562 0 1 24294 value=1 aten::size pnnx_13563 2 1 x6.4 24294 13270 #x6.4=(1,48,48,192)f32 prim::NumToTensor pnnx_13564 1 1 13270 13271 prim::Constant pnnx_13565 0 1 24295 value=2 aten::size pnnx_13566 2 1 x6.4 24295 13272 #x6.4=(1,48,48,192)f32 prim::NumToTensor pnnx_13567 1 1 13272 13273 aten::size pnnx_13568 2 1 x6.4 13239 13274 #x6.4=(1,48,48,192)f32 prim::NumToTensor pnnx_13569 1 1 13274 C1.4 aten::Int pnnx_13570 1 1 C1.4 13276 aten::Int pnnx_13571 1 1 C1.4 13277 aten::div pnnx_13572 3 1 13271 13234 13233 13278 aten::Int pnnx_13573 1 1 13278 13279 prim::Constant pnnx_13574 0 1 24296 value=8 prim::Constant pnnx_13575 0 1 24297 value=trunc aten::div pnnx_13576 3 1 13273 24296 24297 13280 aten::Int pnnx_13577 1 1 13280 13281 prim::Constant pnnx_13578 0 1 24298 value=8 prim::ListConstruct pnnx_13579 6 1 13269 13279 13240 13281 24298 13277 13282 prim::Constant pnnx_13581 0 1 24299 value=0 prim::Constant pnnx_13582 0 1 24300 value=1 prim::Constant pnnx_13583 0 1 24301 value=3 prim::Constant pnnx_13584 0 1 24302 value=2 prim::ListConstruct pnnx_13585 6 1 24299 24300 24301 24302 13241 13242 13284 Tensor.view Tensor.view_1723 2 1 x6.4 13282 x7.4 $input=x6.4 $shape=13282 #x6.4=(1,48,48,192)f32 #x7.4=(1,6,8,6,8,192)f32 prim::Constant pnnx_13589 0 1 24304 value=8 prim::Constant pnnx_13590 0 1 24305 value=8 prim::ListConstruct pnnx_13591 4 1 13243 24304 24305 13276 13287 torch.permute torch.permute_2796 2 1 x7.4 13284 13285 $input=x7.4 $dims=13284 #x7.4=(1,6,8,6,8,192)f32 #13285=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_203 1 1 13285 13286 memory_format=torch.contiguous_format $input=13285 #13285=(1,6,6,8,8,192)f32 #13286=(1,6,6,8,8,192)f32 prim::Constant pnnx_13593 0 1 24306 value=-1 prim::ListConstruct pnnx_13594 3 1 24306 13244 13259 13289 prim::Constant pnnx_13596 0 1 13291 value=1.767767e-01 prim::Constant pnnx_13597 0 1 13292 value=trunc prim::Constant pnnx_13598 0 1 13293 value=6 prim::Constant pnnx_13599 0 1 13294 value=0 prim::Constant pnnx_13600 0 1 13295 value=1 prim::Constant pnnx_13601 0 1 13296 value=2 prim::Constant pnnx_13602 0 1 13297 value=3 prim::Constant pnnx_13603 0 1 13298 value=6 prim::Constant pnnx_13604 0 1 13299 value=4 prim::Constant pnnx_13605 0 1 13300 value=-2 prim::Constant pnnx_13606 0 1 13301 value=-1 prim::Constant pnnx_13607 0 1 13302 value=64 pnnx.Attribute layers_dfe.5.residual_group.blocks.1.attn 0 1 relative_position_bias_table.137 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.137=(225,6)f32 pnnx.Attribute layers_dfe.5.residual_group.blocks.1.attn 0 1 relative_position_index.137 @relative_position_index=(64,64)i64 #relative_position_index.137=(64,64)i64 Tensor.view Tensor.view_1724 2 1 13286 13287 x_windows.137 $input=13286 $shape=13287 #13286=(1,6,6,8,8,192)f32 #x_windows.137=(36,8,8,192)f32 Tensor.view Tensor.view_1725 2 1 x_windows.137 13289 x8.4 $input=x_windows.137 $shape=13289 #x_windows.137=(36,8,8,192)f32 #x8.4=(36,64,192)f32 
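The view / permute / contiguous / view chains above (Tensor.view_1722 through Tensor.view_1725, mirrored later by Tensor.view_1730 through Tensor.view_1733) are the traced form of 8x8 window partitioning and its inverse: (1,48,48,192) is reshaped to (1,6,8,6,8,192), permuted to (1,6,6,8,8,192), and flattened to 36 windows of 64 tokens. A minimal PyTorch sketch of that reshaping, with helper names of our own choosing (the dump itself only records the fused view/permute ops), is:

import torch

# Sketch of the window partition / reverse pattern traced into the graph above.
# Window size 8 matches the (1,6,8,6,8,192) and (36,8,8,192) shapes in the dump.

def window_partition(x: torch.Tensor, window_size: int = 8) -> torch.Tensor:
    # x: (B, H, W, C) -> (num_windows*B, window_size, window_size, C)
    B, H, W, C = x.shape
    x = x.view(B, H // window_size, window_size, W // window_size, window_size, C)
    return x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, window_size, window_size, C)

def window_reverse(windows: torch.Tensor, window_size: int, H: int, W: int) -> torch.Tensor:
    # windows: (num_windows*B, window_size, window_size, C) -> (B, H, W, C)
    B = windows.shape[0] // ((H // window_size) * (W // window_size))
    x = windows.view(B, H // window_size, W // window_size, window_size, window_size, -1)
    return x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, H, W, -1)

if __name__ == "__main__":
    x = torch.randn(1, 48, 48, 192)          # cf. x.137=(1,48,48,192)f32
    w = window_partition(x)                  # (36, 8, 8, 192), cf. x_windows.137
    tokens = w.view(-1, 64, 192)             # (36, 64, 192), cf. x8.4
    back = window_reverse(tokens.view(-1, 8, 8, 192), 8, 48, 48)
    assert torch.equal(x, back)              # partition and reverse round-trip exactly

The partition is a pure permutation of elements, which is why the graph can undo it later with the same view/permute chain run in the opposite order.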
aten::size pnnx_13608 2 1 x8.4 13294 13310 #x8.4=(36,64,192)f32 prim::NumToTensor pnnx_13609 1 1 13310 B_.137 aten::Int pnnx_13610 1 1 B_.137 13312 aten::Int pnnx_13611 1 1 B_.137 13313 aten::size pnnx_13612 2 1 x8.4 13295 13314 #x8.4=(36,64,192)f32 prim::NumToTensor pnnx_13613 1 1 13314 N.137 aten::Int pnnx_13614 1 1 N.137 13316 aten::Int pnnx_13615 1 1 N.137 13317 aten::Int pnnx_13616 1 1 N.137 13318 aten::Int pnnx_13617 1 1 N.137 13319 aten::Int pnnx_13618 1 1 N.137 13320 aten::Int pnnx_13619 1 1 N.137 13321 aten::size pnnx_13620 2 1 x8.4 13296 13322 #x8.4=(36,64,192)f32 prim::NumToTensor pnnx_13621 1 1 13322 C.281 aten::Int pnnx_13622 1 1 C.281 13324 nn.Linear layers_dfe.5.residual_group.blocks.1.attn.qkv 1 1 x8.4 13325 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x8.4=(36,64,192)f32 #13325=(36,64,576)f32 aten::div pnnx_13623 3 1 C.281 13293 13292 13326 aten::Int pnnx_13624 1 1 13326 13327 prim::ListConstruct pnnx_13625 5 1 13313 13321 13297 13298 13327 13328 prim::Constant pnnx_13627 0 1 24307 value=2 prim::Constant pnnx_13628 0 1 24308 value=0 prim::Constant pnnx_13629 0 1 24309 value=3 prim::Constant pnnx_13630 0 1 24310 value=1 prim::ListConstruct pnnx_13631 5 1 24307 24308 24309 24310 13299 13330 Tensor.reshape Tensor.reshape_568 2 1 13325 13328 13329 $input=13325 $shape=13328 #13325=(36,64,576)f32 #13329=(36,64,3,6,32)f32 prim::Constant pnnx_13633 0 1 24311 value=0 prim::Constant pnnx_13634 0 1 24312 value=0 prim::Constant pnnx_13636 0 1 24313 value=0 prim::Constant pnnx_13637 0 1 24314 value=1 prim::Constant pnnx_13639 0 1 24315 value=0 prim::Constant pnnx_13640 0 1 24316 value=2 torch.permute torch.permute_2797 2 1 13329 13330 qkv1.4 $input=13329 $dims=13330 #13329=(36,64,3,6,32)f32 #qkv1.4=(3,36,6,64,32)f32 Tensor.select Tensor.select_851 3 1 qkv1.4 24311 24312 q.137 $input=qkv1.4 $dim=24311 $index=24312 #qkv1.4=(3,36,6,64,32)f32 #q.137=(36,6,64,32)f32 aten::mul pnnx_13642 2 1 q.137 13291 q1.4 #q.137=(36,6,64,32)f32 #q1.4=(36,6,64,32)f32 Tensor.select Tensor.select_852 3 1 qkv1.4 24313 24314 k.137 $input=qkv1.4 $dim=24313 $index=24314 #qkv1.4=(3,36,6,64,32)f32 #k.137=(36,6,64,32)f32 prim::Constant pnnx_13645 0 1 24317 value=-1 prim::ListConstruct pnnx_13646 1 1 24317 13338 Tensor.view Tensor.view_1726 2 1 relative_position_index.137 13338 13339 $input=relative_position_index.137 $shape=13338 #relative_position_index.137=(64,64)i64 #13339=(4096)i64 prim::ListConstruct pnnx_13648 1 1 13339 13340 #13339=(4096)i64 prim::Constant pnnx_13650 0 1 24318 value=64 prim::Constant pnnx_13651 0 1 24319 value=-1 prim::ListConstruct pnnx_13652 3 1 13302 24318 24319 13342 Tensor.index Tensor.index_393 2 1 relative_position_bias_table.137 13340 13341 $input=relative_position_bias_table.137 $expr=13340 #relative_position_bias_table.137=(225,6)f32 #13341=(4096,6)f32 prim::Constant pnnx_13654 0 1 24320 value=2 prim::Constant pnnx_13655 0 1 24321 value=0 prim::Constant pnnx_13656 0 1 24322 value=1 prim::ListConstruct pnnx_13657 3 1 24320 24321 24322 13344 Tensor.view Tensor.view_1727 2 1 13341 13342 relative_position_bias.137 $input=13341 $shape=13342 #13341=(4096,6)f32 #relative_position_bias.137=(64,64,6)f32 prim::Constant pnnx_13661 0 1 24324 value=0 torch.permute torch.permute_2798 2 1 relative_position_bias.137 13344 13345 $input=relative_position_bias.137 $dims=13344 #relative_position_bias.137=(64,64,6)f32 #13345=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_204 1 1 13345 relative_position_bias1.4 memory_format=torch.contiguous_format $input=13345 
#13345=(6,64,64)f32 #relative_position_bias1.4=(6,64,64)f32 prim::Constant pnnx_13663 0 1 24325 value=1 torch.transpose torch.transpose_3119 3 1 k.137 13300 13301 13336 $input=k.137 $dim0=13300 $dim1=13301 #k.137=(36,6,64,32)f32 #13336=(36,6,32,64)f32 torch.matmul torch.matmul_2338 2 1 q1.4 13336 attn.275 $input=q1.4 $other=13336 #q1.4=(36,6,64,32)f32 #13336=(36,6,32,64)f32 #attn.275=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3351 2 1 relative_position_bias1.4 24324 13347 $input=relative_position_bias1.4 $dim=24324 #relative_position_bias1.4=(6,64,64)f32 #13347=(1,6,64,64)f32 aten::add pnnx_13664 3 1 attn.275 13347 24325 attn2.2 #attn.275=(36,6,64,64)f32 #13347=(1,6,64,64)f32 #attn2.2=(36,6,64,64)f32 prim::Constant pnnx_13665 0 1 24326 value=0 aten::size pnnx_13666 2 1 attn_mask.69 24326 13349 #attn_mask.69=(36,64,64)f32 prim::NumToTensor pnnx_13667 1 1 13349 other.69 aten::Int pnnx_13668 1 1 other.69 13351 prim::Constant pnnx_13669 0 1 24327 value=trunc aten::div pnnx_13670 3 1 B_.137 other.69 24327 13352 aten::Int pnnx_13671 1 1 13352 13353 prim::Constant pnnx_13672 0 1 24328 value=6 prim::ListConstruct pnnx_13673 5 1 13353 13351 24328 13320 13319 13354 prim::Constant pnnx_13675 0 1 24329 value=1 prim::Constant pnnx_13677 0 1 24330 value=0 prim::Constant pnnx_13679 0 1 24331 value=1 Tensor.view Tensor.view_1728 2 1 attn2.2 13354 13355 $input=attn2.2 $shape=13354 #attn2.2=(36,6,64,64)f32 #13355=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3352 2 1 attn_mask.69 24329 13356 $input=attn_mask.69 $dim=24329 #attn_mask.69=(36,64,64)f32 #13356=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3353 2 1 13356 24330 13357 $input=13356 $dim=24330 #13356=(36,1,64,64)f32 #13357=(1,36,1,64,64)f32 aten::add pnnx_13680 3 1 13355 13357 24331 attn3.2 #13355=(1,36,6,64,64)f32 #13357=(1,36,1,64,64)f32 #attn3.2=(1,36,6,64,64)f32 prim::Constant pnnx_13681 0 1 24332 value=-1 prim::Constant pnnx_13682 0 1 24333 value=6 prim::ListConstruct pnnx_13683 4 1 24332 24333 13318 13317 13359 Tensor.view Tensor.view_1729 2 1 attn3.2 13359 input.311 $input=attn3.2 $shape=13359 #attn3.2=(1,36,6,64,64)f32 #input.311=(36,6,64,64)f32 nn.Softmax layers_dfe.5.residual_group.blocks.1.attn.softmax 1 1 input.311 13361 dim=-1 #input.311=(36,6,64,64)f32 #13361=(36,6,64,64)f32 nn.Dropout layers_dfe.5.residual_group.blocks.1.attn.attn_drop 1 1 13361 13362 #13361=(36,6,64,64)f32 #13362=(36,6,64,64)f32 Tensor.select Tensor.select_853 3 1 qkv1.4 24315 24316 v.137 $input=qkv1.4 $dim=24315 $index=24316 #qkv1.4=(3,36,6,64,32)f32 #v.137=(36,6,64,32)f32 prim::Constant pnnx_13686 0 1 24334 value=1 prim::Constant pnnx_13687 0 1 24335 value=2 torch.matmul torch.matmul_2339 2 1 13362 v.137 13363 $input=13362 $other=v.137 #13362=(36,6,64,64)f32 #v.137=(36,6,64,32)f32 #13363=(36,6,64,32)f32 prim::ListConstruct pnnx_13689 3 1 13312 13316 13324 13365 torch.transpose torch.transpose_3120 3 1 13363 24334 24335 13364 $input=13363 $dim0=24334 $dim1=24335 #13363=(36,6,64,32)f32 #13364=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_569 2 1 13364 13365 input1.4 $input=13364 $shape=13365 #13364=(36,64,6,32)f32 #input1.4=(36,64,192)f32 nn.Linear layers_dfe.5.residual_group.blocks.1.attn.proj 1 1 input1.4 13367 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.4=(36,64,192)f32 #13367=(36,64,192)f32 nn.Dropout layers_dfe.5.residual_group.blocks.1.attn.proj_drop 1 1 13367 13368 #13367=(36,64,192)f32 #13368=(36,64,192)f32 prim::Constant pnnx_13691 0 1 24336 value=-1 prim::Constant pnnx_13692 0 1 24337 value=8 
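The run of operators from layers_dfe.5.residual_group.blocks.1.attn.qkv down to Tensor.view_1729 is the traced shifted-window attention: a 192->576 qkv projection split into 6 heads of width 32 (the constant 1.767767e-01 is 1/sqrt(32)), a relative position bias gathered from the (225,6) table with the (64,64) index, an additive (36,64,64) attention mask applied only in the shifted blocks, softmax, and a 192->192 output projection. A compact reconstruction from those shapes, assuming the standard Swin-style layout (function and argument names are ours, dropout omitted since it is a no-op at export), is:

import math
import torch
import torch.nn as nn
import torch.nn.functional as F

# Hedged reconstruction of the masked window-attention pattern traced above.
# Shapes follow the dump: 36 windows of 64 tokens, dim 192, 6 heads of 32.
def window_attention(x, qkv: nn.Linear, proj: nn.Linear,
                     bias_table, bias_index, attn_mask=None, num_heads=6):
    B_, N, C = x.shape                                    # (36, 64, 192)
    head_dim = C // num_heads
    qkv_out = qkv(x).reshape(B_, N, 3, num_heads, head_dim).permute(2, 0, 3, 1, 4)
    q, k, v = qkv_out[0], qkv_out[1], qkv_out[2]          # each (36, 6, 64, 32)

    q = q * (1.0 / math.sqrt(head_dim))                   # the 1.767767e-01 constant
    attn = q @ k.transpose(-2, -1)                        # (36, 6, 64, 64)

    # Relative position bias: (225, 6) table gathered by the flattened (64, 64) index.
    bias = bias_table[bias_index.view(-1)].view(N, N, -1) # (64, 64, 6)
    attn = attn + bias.permute(2, 0, 1).contiguous().unsqueeze(0)

    if attn_mask is not None:                             # shifted-window blocks only
        nW = attn_mask.shape[0]                           # 36 windows
        attn = attn.view(B_ // nW, nW, num_heads, N, N) \
             + attn_mask.unsqueeze(1).unsqueeze(0)
        attn = attn.view(-1, num_heads, N, N)

    attn = F.softmax(attn, dim=-1)
    out = (attn @ v).transpose(1, 2).reshape(B_, N, C)    # merge heads back to 192
    return proj(out)

if __name__ == "__main__":
    qkv = nn.Linear(192, 576)
    proj = nn.Linear(192, 192)
    table = torch.randn(225, 6)                 # cf. relative_position_bias_table
    index = torch.randint(0, 225, (64, 64))     # cf. relative_position_index
    mask = torch.zeros(36, 64, 64)              # cf. attn_mask.69
    y = window_attention(torch.randn(36, 64, 192), qkv, proj, table, index, mask)
    print(y.shape)                              # torch.Size([36, 64, 192])

The surrounding torch.roll ops with shifts (-4,-4) before the partition and (4,4) after the reverse are the cyclic shift that makes the mask necessary; the unshifted blocks in the dump skip both the roll and the mask addition.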
prim::Constant pnnx_13693 0 1 24338 value=8 prim::ListConstruct pnnx_13694 4 1 24336 24337 24338 13258 13369 prim::Constant pnnx_13696 0 1 24339 value=8 prim::Constant pnnx_13697 0 1 24340 value=trunc aten::div pnnx_13698 3 1 H0.1 24339 24340 13371 aten::Int pnnx_13699 1 1 13371 13372 prim::Constant pnnx_13700 0 1 24341 value=8 prim::Constant pnnx_13701 0 1 24342 value=trunc aten::div pnnx_13702 3 1 W0.1 24341 24342 13373 aten::Int pnnx_13703 1 1 13373 13374 prim::Constant pnnx_13704 0 1 24343 value=1 prim::Constant pnnx_13705 0 1 24344 value=8 prim::Constant pnnx_13706 0 1 24345 value=8 prim::Constant pnnx_13707 0 1 24346 value=-1 prim::ListConstruct pnnx_13708 6 1 24343 13372 13374 24344 24345 24346 13375 prim::Constant pnnx_13710 0 1 24347 value=0 prim::Constant pnnx_13711 0 1 24348 value=1 prim::Constant pnnx_13712 0 1 24349 value=3 prim::Constant pnnx_13713 0 1 24350 value=2 prim::Constant pnnx_13714 0 1 24351 value=4 prim::Constant pnnx_13715 0 1 24352 value=5 prim::ListConstruct pnnx_13716 6 1 24347 24348 24349 24350 24351 24352 13377 Tensor.view Tensor.view_1730 2 1 13368 13369 windows.137 $input=13368 $shape=13369 #13368=(36,64,192)f32 #windows.137=(36,8,8,192)f32 Tensor.view Tensor.view_1731 2 1 windows.137 13375 x9.4 $input=windows.137 $shape=13375 #windows.137=(36,8,8,192)f32 #x9.4=(1,6,6,8,8,192)f32 prim::Constant pnnx_13720 0 1 24354 value=1 prim::Constant pnnx_13721 0 1 24355 value=-1 prim::ListConstruct pnnx_13722 4 1 24354 878 1118 24355 13380 torch.permute torch.permute_2799 2 1 x9.4 13377 13378 $input=x9.4 $dims=13377 #x9.4=(1,6,6,8,8,192)f32 #13378=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_205 1 1 13378 13379 memory_format=torch.contiguous_format $input=13378 #13378=(1,6,8,6,8,192)f32 #13379=(1,6,8,6,8,192)f32 prim::Constant pnnx_13724 0 1 24356 value=4 prim::Constant pnnx_13725 0 1 24357 value=4 prim::ListConstruct pnnx_13726 2 1 24356 24357 13382 prim::Constant pnnx_13727 0 1 24358 value=1 prim::Constant pnnx_13728 0 1 24359 value=2 prim::ListConstruct pnnx_13729 2 1 24358 24359 13383 Tensor.view Tensor.view_1732 2 1 13379 13380 shifted_x.69 $input=13379 $shape=13380 #13379=(1,6,8,6,8,192)f32 #shifted_x.69=(1,48,48,192)f32 aten::mul pnnx_13731 2 1 H0.1 W0.1 13385 aten::Int pnnx_13732 1 1 13385 13386 prim::ListConstruct pnnx_13733 3 1 13253 13386 13257 13387 prim::Constant pnnx_13735 0 1 13389 value=None prim::Constant pnnx_13736 0 1 24360 value=1 torch.roll torch.roll_2487 3 1 shifted_x.69 13382 13383 x10.2 $input=shifted_x.69 $shifts=13382 $dims=13383 #shifted_x.69=(1,48,48,192)f32 #x10.2=(1,48,48,192)f32 Tensor.view Tensor.view_1733 2 1 x10.2 13387 x11.2 $input=x10.2 $shape=13387 #x10.2=(1,48,48,192)f32 #x11.2=(1,2304,192)f32 aten::add pnnx_13737 3 1 13232 x11.2 24360 input.313 #13232=(1,2304,192)f32 #x11.2=(1,2304,192)f32 #input.313=(1,2304,192)f32 nn.LayerNorm layers_dfe.5.residual_group.blocks.1.norm2 1 1 input.313 13391 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.313=(1,2304,192)f32 #13391=(1,2304,192)f32 nn.Linear layers_dfe.5.residual_group.blocks.1.mlp.fc1 1 1 13391 13396 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #13391=(1,2304,192)f32 #13396=(1,2304,384)f32 nn.GELU layers_dfe.5.residual_group.blocks.1.mlp.act 1 1 13396 13397 #13396=(1,2304,384)f32 #13397=(1,2304,384)f32 nn.Dropout layers_dfe.5.residual_group.blocks.1.mlp.drop 1 1 13397 13398 #13397=(1,2304,384)f32 #13398=(1,2304,384)f32 nn.Linear layers_dfe.5.residual_group.blocks.1.mlp.fc2 1 1 13398 
13399 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #13398=(1,2304,384)f32 #13399=(1,2304,192)f32 nn.Dropout layers_dfe.5.residual_group.blocks.1.mlp.drop 1 1 13399 13400 #13399=(1,2304,192)f32 #13400=(1,2304,192)f32 prim::Constant pnnx_13738 0 1 13401 value=None prim::Constant pnnx_13739 0 1 24361 value=1 aten::add pnnx_13740 3 1 input.313 13400 24361 13402 #input.313=(1,2304,192)f32 #13400=(1,2304,192)f32 #13402=(1,2304,192)f32 prim::Constant pnnx_13741 0 1 13403 value=trunc prim::Constant pnnx_13742 0 1 13404 value=8 prim::Constant pnnx_13743 0 1 13405 value=0 prim::Constant pnnx_13744 0 1 13406 value=2 prim::Constant pnnx_13745 0 1 13407 value=1 prim::Constant pnnx_13746 0 1 13408 value=3 prim::Constant pnnx_13747 0 1 13409 value=8 prim::Constant pnnx_13748 0 1 13410 value=4 prim::Constant pnnx_13749 0 1 13411 value=5 prim::Constant pnnx_13750 0 1 13412 value=-1 prim::Constant pnnx_13751 0 1 13413 value=64 aten::size pnnx_13752 2 1 13402 13405 13419 #13402=(1,2304,192)f32 prim::NumToTensor pnnx_13753 1 1 13419 B.167 aten::Int pnnx_13754 1 1 B.167 13421 aten::Int pnnx_13755 1 1 B.167 13422 aten::size pnnx_13756 2 1 13402 13406 13423 #13402=(1,2304,192)f32 prim::NumToTensor pnnx_13757 1 1 13423 C.283 aten::Int pnnx_13758 1 1 C.283 13425 aten::Int pnnx_13759 1 1 C.283 13426 aten::Int pnnx_13760 1 1 C.283 13427 aten::Int pnnx_13761 1 1 C.283 13428 nn.LayerNorm layers_dfe.5.residual_group.blocks.2.norm1 1 1 13402 13429 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #13402=(1,2304,192)f32 #13429=(1,2304,192)f32 prim::ListConstruct pnnx_13762 4 1 13422 875 1115 13428 13430 prim::Constant pnnx_13764 0 1 24362 value=0 Tensor.view Tensor.view_1734 2 1 13429 13430 x.139 $input=13429 $shape=13430 #13429=(1,2304,192)f32 #x.139=(1,48,48,192)f32 aten::size pnnx_13765 2 1 x.139 24362 13432 #x.139=(1,48,48,192)f32 prim::NumToTensor pnnx_13766 1 1 13432 B1.6 aten::Int pnnx_13767 1 1 B1.6 13434 aten::size pnnx_13768 2 1 x.139 13407 13435 #x.139=(1,48,48,192)f32 prim::NumToTensor pnnx_13769 1 1 13435 13436 prim::Constant pnnx_13770 0 1 24363 value=2 aten::size pnnx_13771 2 1 x.139 24363 13437 #x.139=(1,48,48,192)f32 prim::NumToTensor pnnx_13772 1 1 13437 13438 aten::size pnnx_13773 2 1 x.139 13408 13439 #x.139=(1,48,48,192)f32 prim::NumToTensor pnnx_13774 1 1 13439 C1.6 aten::Int pnnx_13775 1 1 C1.6 13441 aten::Int pnnx_13776 1 1 C1.6 13442 aten::div pnnx_13777 3 1 13436 13404 13403 13443 aten::Int pnnx_13778 1 1 13443 13444 prim::Constant pnnx_13779 0 1 24364 value=8 prim::Constant pnnx_13780 0 1 24365 value=trunc aten::div pnnx_13781 3 1 13438 24364 24365 13445 aten::Int pnnx_13782 1 1 13445 13446 prim::Constant pnnx_13783 0 1 24366 value=8 prim::ListConstruct pnnx_13784 6 1 13434 13444 13409 13446 24366 13442 13447 prim::Constant pnnx_13786 0 1 24367 value=0 prim::Constant pnnx_13787 0 1 24368 value=1 prim::Constant pnnx_13788 0 1 24369 value=3 prim::Constant pnnx_13789 0 1 24370 value=2 prim::ListConstruct pnnx_13790 6 1 24367 24368 24369 24370 13410 13411 13449 Tensor.view Tensor.view_1735 2 1 x.139 13447 x5.71 $input=x.139 $shape=13447 #x.139=(1,48,48,192)f32 #x5.71=(1,6,8,6,8,192)f32 prim::Constant pnnx_13794 0 1 24372 value=8 prim::Constant pnnx_13795 0 1 24373 value=8 prim::ListConstruct pnnx_13796 4 1 13412 24372 24373 13441 13452 torch.permute torch.permute_2800 2 1 x5.71 13449 13450 $input=x5.71 $dims=13449 #x5.71=(1,6,8,6,8,192)f32 #13450=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_206 1 1 13450 13451 
memory_format=torch.contiguous_format $input=13450 #13450=(1,6,6,8,8,192)f32 #13451=(1,6,6,8,8,192)f32 prim::Constant pnnx_13798 0 1 24374 value=-1 prim::ListConstruct pnnx_13799 3 1 24374 13413 13427 13454 prim::Constant pnnx_13801 0 1 13456 value=1.767767e-01 prim::Constant pnnx_13802 0 1 13457 value=trunc prim::Constant pnnx_13803 0 1 13458 value=6 prim::Constant pnnx_13804 0 1 13459 value=0 prim::Constant pnnx_13805 0 1 13460 value=1 prim::Constant pnnx_13806 0 1 13461 value=2 prim::Constant pnnx_13807 0 1 13462 value=3 prim::Constant pnnx_13808 0 1 13463 value=6 prim::Constant pnnx_13809 0 1 13464 value=4 prim::Constant pnnx_13810 0 1 13465 value=-2 prim::Constant pnnx_13811 0 1 13466 value=-1 prim::Constant pnnx_13812 0 1 13467 value=64 pnnx.Attribute layers_dfe.5.residual_group.blocks.2.attn 0 1 relative_position_bias_table.139 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.139=(225,6)f32 pnnx.Attribute layers_dfe.5.residual_group.blocks.2.attn 0 1 relative_position_index.139 @relative_position_index=(64,64)i64 #relative_position_index.139=(64,64)i64 Tensor.view Tensor.view_1736 2 1 13451 13452 x_windows.139 $input=13451 $shape=13452 #13451=(1,6,6,8,8,192)f32 #x_windows.139=(36,8,8,192)f32 Tensor.view Tensor.view_1737 2 1 x_windows.139 13454 x6.6 $input=x_windows.139 $shape=13454 #x_windows.139=(36,8,8,192)f32 #x6.6=(36,64,192)f32 aten::size pnnx_13813 2 1 x6.6 13459 13475 #x6.6=(36,64,192)f32 prim::NumToTensor pnnx_13814 1 1 13475 B_.139 aten::Int pnnx_13815 1 1 B_.139 13477 aten::Int pnnx_13816 1 1 B_.139 13478 aten::size pnnx_13817 2 1 x6.6 13460 13479 #x6.6=(36,64,192)f32 prim::NumToTensor pnnx_13818 1 1 13479 N.139 aten::Int pnnx_13819 1 1 N.139 13481 aten::Int pnnx_13820 1 1 N.139 13482 aten::size pnnx_13821 2 1 x6.6 13461 13483 #x6.6=(36,64,192)f32 prim::NumToTensor pnnx_13822 1 1 13483 C.285 aten::Int pnnx_13823 1 1 C.285 13485 nn.Linear layers_dfe.5.residual_group.blocks.2.attn.qkv 1 1 x6.6 13486 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x6.6=(36,64,192)f32 #13486=(36,64,576)f32 aten::div pnnx_13824 3 1 C.285 13458 13457 13487 aten::Int pnnx_13825 1 1 13487 13488 prim::ListConstruct pnnx_13826 5 1 13478 13482 13462 13463 13488 13489 prim::Constant pnnx_13828 0 1 24375 value=2 prim::Constant pnnx_13829 0 1 24376 value=0 prim::Constant pnnx_13830 0 1 24377 value=3 prim::Constant pnnx_13831 0 1 24378 value=1 prim::ListConstruct pnnx_13832 5 1 24375 24376 24377 24378 13464 13491 Tensor.reshape Tensor.reshape_570 2 1 13486 13489 13490 $input=13486 $shape=13489 #13486=(36,64,576)f32 #13490=(36,64,3,6,32)f32 prim::Constant pnnx_13834 0 1 24379 value=0 prim::Constant pnnx_13835 0 1 24380 value=0 prim::Constant pnnx_13837 0 1 24381 value=0 prim::Constant pnnx_13838 0 1 24382 value=1 prim::Constant pnnx_13840 0 1 24383 value=0 prim::Constant pnnx_13841 0 1 24384 value=2 torch.permute torch.permute_2801 2 1 13490 13491 qkv1.6 $input=13490 $dims=13491 #13490=(36,64,3,6,32)f32 #qkv1.6=(3,36,6,64,32)f32 Tensor.select Tensor.select_854 3 1 qkv1.6 24379 24380 q.139 $input=qkv1.6 $dim=24379 $index=24380 #qkv1.6=(3,36,6,64,32)f32 #q.139=(36,6,64,32)f32 aten::mul pnnx_13843 2 1 q.139 13456 q1.6 #q.139=(36,6,64,32)f32 #q1.6=(36,6,64,32)f32 Tensor.select Tensor.select_855 3 1 qkv1.6 24381 24382 k.139 $input=qkv1.6 $dim=24381 $index=24382 #qkv1.6=(3,36,6,64,32)f32 #k.139=(36,6,64,32)f32 prim::Constant pnnx_13846 0 1 24385 value=-1 prim::ListConstruct pnnx_13847 1 1 24385 13499 Tensor.view Tensor.view_1738 2 1 
relative_position_index.139 13499 13500 $input=relative_position_index.139 $shape=13499 #relative_position_index.139=(64,64)i64 #13500=(4096)i64 prim::ListConstruct pnnx_13849 1 1 13500 13501 #13500=(4096)i64 prim::Constant pnnx_13851 0 1 24386 value=64 prim::Constant pnnx_13852 0 1 24387 value=-1 prim::ListConstruct pnnx_13853 3 1 13467 24386 24387 13503 Tensor.index Tensor.index_394 2 1 relative_position_bias_table.139 13501 13502 $input=relative_position_bias_table.139 $expr=13501 #relative_position_bias_table.139=(225,6)f32 #13502=(4096,6)f32 prim::Constant pnnx_13855 0 1 24388 value=2 prim::Constant pnnx_13856 0 1 24389 value=0 prim::Constant pnnx_13857 0 1 24390 value=1 prim::ListConstruct pnnx_13858 3 1 24388 24389 24390 13505 Tensor.view Tensor.view_1739 2 1 13502 13503 relative_position_bias.139 $input=13502 $shape=13503 #13502=(4096,6)f32 #relative_position_bias.139=(64,64,6)f32 prim::Constant pnnx_13862 0 1 24392 value=0 torch.permute torch.permute_2802 2 1 relative_position_bias.139 13505 13506 $input=relative_position_bias.139 $dims=13505 #relative_position_bias.139=(64,64,6)f32 #13506=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_207 1 1 13506 relative_position_bias1.6 memory_format=torch.contiguous_format $input=13506 #13506=(6,64,64)f32 #relative_position_bias1.6=(6,64,64)f32 prim::Constant pnnx_13864 0 1 24393 value=1 torch.transpose torch.transpose_3121 3 1 k.139 13465 13466 13497 $input=k.139 $dim0=13465 $dim1=13466 #k.139=(36,6,64,32)f32 #13497=(36,6,32,64)f32 torch.matmul torch.matmul_2340 2 1 q1.6 13497 attn.279 $input=q1.6 $other=13497 #q1.6=(36,6,64,32)f32 #13497=(36,6,32,64)f32 #attn.279=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3354 2 1 relative_position_bias1.6 24392 13508 $input=relative_position_bias1.6 $dim=24392 #relative_position_bias1.6=(6,64,64)f32 #13508=(1,6,64,64)f32 aten::add pnnx_13865 3 1 attn.279 13508 24393 input.315 #attn.279=(36,6,64,64)f32 #13508=(1,6,64,64)f32 #input.315=(36,6,64,64)f32 nn.Softmax layers_dfe.5.residual_group.blocks.2.attn.softmax 1 1 input.315 13510 dim=-1 #input.315=(36,6,64,64)f32 #13510=(36,6,64,64)f32 nn.Dropout layers_dfe.5.residual_group.blocks.2.attn.attn_drop 1 1 13510 13511 #13510=(36,6,64,64)f32 #13511=(36,6,64,64)f32 Tensor.select Tensor.select_856 3 1 qkv1.6 24383 24384 v.139 $input=qkv1.6 $dim=24383 $index=24384 #qkv1.6=(3,36,6,64,32)f32 #v.139=(36,6,64,32)f32 prim::Constant pnnx_13867 0 1 24394 value=1 prim::Constant pnnx_13868 0 1 24395 value=2 torch.matmul torch.matmul_2341 2 1 13511 v.139 13512 $input=13511 $other=v.139 #13511=(36,6,64,64)f32 #v.139=(36,6,64,32)f32 #13512=(36,6,64,32)f32 prim::ListConstruct pnnx_13870 3 1 13477 13481 13485 13514 torch.transpose torch.transpose_3122 3 1 13512 24394 24395 13513 $input=13512 $dim0=24394 $dim1=24395 #13512=(36,6,64,32)f32 #13513=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_571 2 1 13513 13514 input1.6 $input=13513 $shape=13514 #13513=(36,64,6,32)f32 #input1.6=(36,64,192)f32 nn.Linear layers_dfe.5.residual_group.blocks.2.attn.proj 1 1 input1.6 13516 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.6=(36,64,192)f32 #13516=(36,64,192)f32 nn.Dropout layers_dfe.5.residual_group.blocks.2.attn.proj_drop 1 1 13516 13517 #13516=(36,64,192)f32 #13517=(36,64,192)f32 prim::Constant pnnx_13872 0 1 24396 value=-1 prim::Constant pnnx_13873 0 1 24397 value=8 prim::Constant pnnx_13874 0 1 24398 value=8 prim::ListConstruct pnnx_13875 4 1 24396 24397 24398 13426 13518 prim::Constant pnnx_13877 0 1 24399 value=8 prim::Constant pnnx_13878 
0 1 24400 value=trunc aten::div pnnx_13879 3 1 H0.1 24399 24400 13520 aten::Int pnnx_13880 1 1 13520 13521 prim::Constant pnnx_13881 0 1 24401 value=8 prim::Constant pnnx_13882 0 1 24402 value=trunc aten::div pnnx_13883 3 1 W0.1 24401 24402 13522 aten::Int pnnx_13884 1 1 13522 13523 prim::Constant pnnx_13885 0 1 24403 value=1 prim::Constant pnnx_13886 0 1 24404 value=8 prim::Constant pnnx_13887 0 1 24405 value=8 prim::Constant pnnx_13888 0 1 24406 value=-1 prim::ListConstruct pnnx_13889 6 1 24403 13521 13523 24404 24405 24406 13524 prim::Constant pnnx_13891 0 1 24407 value=0 prim::Constant pnnx_13892 0 1 24408 value=1 prim::Constant pnnx_13893 0 1 24409 value=3 prim::Constant pnnx_13894 0 1 24410 value=2 prim::Constant pnnx_13895 0 1 24411 value=4 prim::Constant pnnx_13896 0 1 24412 value=5 prim::ListConstruct pnnx_13897 6 1 24407 24408 24409 24410 24411 24412 13526 Tensor.view Tensor.view_1740 2 1 13517 13518 windows.139 $input=13517 $shape=13518 #13517=(36,64,192)f32 #windows.139=(36,8,8,192)f32 Tensor.view Tensor.view_1741 2 1 windows.139 13524 x7.6 $input=windows.139 $shape=13524 #windows.139=(36,8,8,192)f32 #x7.6=(1,6,6,8,8,192)f32 prim::Constant pnnx_13901 0 1 24414 value=1 prim::Constant pnnx_13902 0 1 24415 value=-1 prim::ListConstruct pnnx_13903 4 1 24414 872 1112 24415 13529 torch.permute torch.permute_2803 2 1 x7.6 13526 13527 $input=x7.6 $dims=13526 #x7.6=(1,6,6,8,8,192)f32 #13527=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_208 1 1 13527 13528 memory_format=torch.contiguous_format $input=13527 #13527=(1,6,8,6,8,192)f32 #13528=(1,6,8,6,8,192)f32 aten::mul pnnx_13905 2 1 H0.1 W0.1 13531 aten::Int pnnx_13906 1 1 13531 13532 prim::ListConstruct pnnx_13907 3 1 13421 13532 13425 13533 prim::Constant pnnx_13909 0 1 13535 value=None prim::Constant pnnx_13910 0 1 24416 value=1 Tensor.view Tensor.view_1742 2 1 13528 13529 x8.6 $input=13528 $shape=13529 #13528=(1,6,8,6,8,192)f32 #x8.6=(1,48,48,192)f32 Tensor.view Tensor.view_1743 2 1 x8.6 13533 x9.6 $input=x8.6 $shape=13533 #x8.6=(1,48,48,192)f32 #x9.6=(1,2304,192)f32 aten::add pnnx_13911 3 1 13402 x9.6 24416 input.317 #13402=(1,2304,192)f32 #x9.6=(1,2304,192)f32 #input.317=(1,2304,192)f32 nn.LayerNorm layers_dfe.5.residual_group.blocks.2.norm2 1 1 input.317 13537 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.317=(1,2304,192)f32 #13537=(1,2304,192)f32 nn.Linear layers_dfe.5.residual_group.blocks.2.mlp.fc1 1 1 13537 13542 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #13537=(1,2304,192)f32 #13542=(1,2304,384)f32 nn.GELU layers_dfe.5.residual_group.blocks.2.mlp.act 1 1 13542 13543 #13542=(1,2304,384)f32 #13543=(1,2304,384)f32 nn.Dropout layers_dfe.5.residual_group.blocks.2.mlp.drop 1 1 13543 13544 #13543=(1,2304,384)f32 #13544=(1,2304,384)f32 nn.Linear layers_dfe.5.residual_group.blocks.2.mlp.fc2 1 1 13544 13545 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #13544=(1,2304,384)f32 #13545=(1,2304,192)f32 nn.Dropout layers_dfe.5.residual_group.blocks.2.mlp.drop 1 1 13545 13546 #13545=(1,2304,192)f32 #13546=(1,2304,192)f32 prim::Constant pnnx_13912 0 1 13547 value=None prim::Constant pnnx_13913 0 1 24417 value=1 aten::add pnnx_13914 3 1 input.317 13546 24417 13548 #input.317=(1,2304,192)f32 #13546=(1,2304,192)f32 #13548=(1,2304,192)f32 prim::Constant pnnx_13915 0 1 13549 value=trunc prim::Constant pnnx_13916 0 1 13550 value=8 prim::Constant pnnx_13917 0 1 13551 value=0 prim::Constant pnnx_13918 0 1 13552 value=2 
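The view → permute → contiguous → view chains traced above (Tensor.view_1735/1736 going from (1,48,48,192) to (36,8,8,192) and Tensor.view_1740–1743 going back) correspond to Swin-style window partition and window reverse with window size 8. A minimal PyTorch sketch of the same reshapes, assuming square feature maps divisible by the window size; the function names are illustrative, not taken from the traced model.

import torch

def window_partition(x, window_size):
    # x: (B, H, W, C) -> (num_windows*B, window_size, window_size, C)
    B, H, W, C = x.shape
    x = x.view(B, H // window_size, window_size, W // window_size, window_size, C)
    windows = x.permute(0, 1, 3, 2, 4, 5).contiguous()
    return windows.view(-1, window_size, window_size, C)

def window_reverse(windows, window_size, H, W):
    # windows: (num_windows*B, window_size, window_size, C) -> (B, H, W, C)
    B = windows.shape[0] // ((H // window_size) * (W // window_size))
    x = windows.view(B, H // window_size, W // window_size, window_size, window_size, -1)
    x = x.permute(0, 1, 3, 2, 4, 5).contiguous()
    return x.view(B, H, W, -1)

# Shapes as traced: (1, 48, 48, 192) with window_size=8 -> (36, 8, 8, 192) and back.
x = torch.randn(1, 48, 48, 192)
w = window_partition(x, 8)        # (36, 8, 8, 192)
y = window_reverse(w, 8, 48, 48)  # (1, 48, 48, 192)
assert torch.equal(x, y)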
prim::Constant pnnx_13919 0 1 13553 value=-4 prim::Constant pnnx_13920 0 1 13554 value=1 prim::Constant pnnx_13921 0 1 13555 value=3 prim::Constant pnnx_13922 0 1 13556 value=8 prim::Constant pnnx_13923 0 1 13557 value=4 prim::Constant pnnx_13924 0 1 13558 value=5 prim::Constant pnnx_13925 0 1 13559 value=-1 prim::Constant pnnx_13926 0 1 13560 value=64 pnnx.Attribute layers_dfe.5.residual_group.blocks.3 0 1 attn_mask.71 @attn_mask=(36,64,64)f32 #attn_mask.71=(36,64,64)f32 aten::size pnnx_13927 2 1 13548 13551 13567 #13548=(1,2304,192)f32 prim::NumToTensor pnnx_13928 1 1 13567 B.169 aten::Int pnnx_13929 1 1 B.169 13569 aten::Int pnnx_13930 1 1 B.169 13570 aten::size pnnx_13931 2 1 13548 13552 13571 #13548=(1,2304,192)f32 prim::NumToTensor pnnx_13932 1 1 13571 C.287 aten::Int pnnx_13933 1 1 C.287 13573 aten::Int pnnx_13934 1 1 C.287 13574 aten::Int pnnx_13935 1 1 C.287 13575 aten::Int pnnx_13936 1 1 C.287 13576 nn.LayerNorm layers_dfe.5.residual_group.blocks.3.norm1 1 1 13548 13577 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #13548=(1,2304,192)f32 #13577=(1,2304,192)f32 prim::ListConstruct pnnx_13937 4 1 13570 869 1109 13576 13578 prim::Constant pnnx_13939 0 1 24418 value=-4 prim::ListConstruct pnnx_13940 2 1 13553 24418 13580 prim::Constant pnnx_13941 0 1 24419 value=2 prim::ListConstruct pnnx_13942 2 1 13554 24419 13581 Tensor.view Tensor.view_1744 2 1 13577 13578 x.141 $input=13577 $shape=13578 #13577=(1,2304,192)f32 #x.141=(1,48,48,192)f32 prim::Constant pnnx_13944 0 1 24420 value=0 torch.roll torch.roll_2488 3 1 x.141 13580 13581 x6.8 $input=x.141 $shifts=13580 $dims=13581 #x.141=(1,48,48,192)f32 #x6.8=(1,48,48,192)f32 aten::size pnnx_13945 2 1 x6.8 24420 13583 #x6.8=(1,48,48,192)f32 prim::NumToTensor pnnx_13946 1 1 13583 B1.8 aten::Int pnnx_13947 1 1 B1.8 13585 prim::Constant pnnx_13948 0 1 24421 value=1 aten::size pnnx_13949 2 1 x6.8 24421 13586 #x6.8=(1,48,48,192)f32 prim::NumToTensor pnnx_13950 1 1 13586 13587 prim::Constant pnnx_13951 0 1 24422 value=2 aten::size pnnx_13952 2 1 x6.8 24422 13588 #x6.8=(1,48,48,192)f32 prim::NumToTensor pnnx_13953 1 1 13588 13589 aten::size pnnx_13954 2 1 x6.8 13555 13590 #x6.8=(1,48,48,192)f32 prim::NumToTensor pnnx_13955 1 1 13590 C1.8 aten::Int pnnx_13956 1 1 C1.8 13592 aten::Int pnnx_13957 1 1 C1.8 13593 aten::div pnnx_13958 3 1 13587 13550 13549 13594 aten::Int pnnx_13959 1 1 13594 13595 prim::Constant pnnx_13960 0 1 24423 value=8 prim::Constant pnnx_13961 0 1 24424 value=trunc aten::div pnnx_13962 3 1 13589 24423 24424 13596 aten::Int pnnx_13963 1 1 13596 13597 prim::Constant pnnx_13964 0 1 24425 value=8 prim::ListConstruct pnnx_13965 6 1 13585 13595 13556 13597 24425 13593 13598 prim::Constant pnnx_13967 0 1 24426 value=0 prim::Constant pnnx_13968 0 1 24427 value=1 prim::Constant pnnx_13969 0 1 24428 value=3 prim::Constant pnnx_13970 0 1 24429 value=2 prim::ListConstruct pnnx_13971 6 1 24426 24427 24428 24429 13557 13558 13600 Tensor.view Tensor.view_1745 2 1 x6.8 13598 x7.8 $input=x6.8 $shape=13598 #x6.8=(1,48,48,192)f32 #x7.8=(1,6,8,6,8,192)f32 prim::Constant pnnx_13975 0 1 24431 value=8 prim::Constant pnnx_13976 0 1 24432 value=8 prim::ListConstruct pnnx_13977 4 1 13559 24431 24432 13592 13603 torch.permute torch.permute_2804 2 1 x7.8 13600 13601 $input=x7.8 $dims=13600 #x7.8=(1,6,8,6,8,192)f32 #13601=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_209 1 1 13601 13602 memory_format=torch.contiguous_format $input=13601 #13601=(1,6,6,8,8,192)f32 #13602=(1,6,6,8,8,192)f32 
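torch.roll_2488 with shifts (-4, -4) on dims (1, 2), paired later in the same block with torch.roll_2489 using (+4, +4), is the cyclic shift of a shifted-window (SW-MSA) block with shift_size = window_size // 2 = 4. A small sketch of that round trip, assuming the traced (1, 48, 48, 192) layout; the random tensor is a stand-in for the normalized features.

import torch

x = torch.randn(1, 48, 48, 192)   # (B, H, W, C) as in the traced graph
shift = 4                         # window_size 8 // 2

# Cyclic shift before windowing (matches torch.roll shifts=(-4, -4), dims=(1, 2)).
shifted = torch.roll(x, shifts=(-shift, -shift), dims=(1, 2))

# ... masked window attention would run on `shifted` here ...

# Reverse shift after window_reverse (matches torch.roll shifts=(4, 4), dims=(1, 2)).
restored = torch.roll(shifted, shifts=(shift, shift), dims=(1, 2))
assert torch.equal(restored, x)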
prim::Constant pnnx_13979 0 1 24433 value=-1 prim::ListConstruct pnnx_13980 3 1 24433 13560 13575 13605 prim::Constant pnnx_13982 0 1 13607 value=1.767767e-01 prim::Constant pnnx_13983 0 1 13608 value=trunc prim::Constant pnnx_13984 0 1 13609 value=6 prim::Constant pnnx_13985 0 1 13610 value=0 prim::Constant pnnx_13986 0 1 13611 value=1 prim::Constant pnnx_13987 0 1 13612 value=2 prim::Constant pnnx_13988 0 1 13613 value=3 prim::Constant pnnx_13989 0 1 13614 value=6 prim::Constant pnnx_13990 0 1 13615 value=4 prim::Constant pnnx_13991 0 1 13616 value=-2 prim::Constant pnnx_13992 0 1 13617 value=-1 prim::Constant pnnx_13993 0 1 13618 value=64 pnnx.Attribute layers_dfe.5.residual_group.blocks.3.attn 0 1 relative_position_bias_table.141 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.141=(225,6)f32 pnnx.Attribute layers_dfe.5.residual_group.blocks.3.attn 0 1 relative_position_index.141 @relative_position_index=(64,64)i64 #relative_position_index.141=(64,64)i64 Tensor.view Tensor.view_1746 2 1 13602 13603 x_windows.141 $input=13602 $shape=13603 #13602=(1,6,6,8,8,192)f32 #x_windows.141=(36,8,8,192)f32 Tensor.view Tensor.view_1747 2 1 x_windows.141 13605 x8.8 $input=x_windows.141 $shape=13605 #x_windows.141=(36,8,8,192)f32 #x8.8=(36,64,192)f32 aten::size pnnx_13994 2 1 x8.8 13610 13626 #x8.8=(36,64,192)f32 prim::NumToTensor pnnx_13995 1 1 13626 B_.141 aten::Int pnnx_13996 1 1 B_.141 13628 aten::Int pnnx_13997 1 1 B_.141 13629 aten::size pnnx_13998 2 1 x8.8 13611 13630 #x8.8=(36,64,192)f32 prim::NumToTensor pnnx_13999 1 1 13630 N.141 aten::Int pnnx_14000 1 1 N.141 13632 aten::Int pnnx_14001 1 1 N.141 13633 aten::Int pnnx_14002 1 1 N.141 13634 aten::Int pnnx_14003 1 1 N.141 13635 aten::Int pnnx_14004 1 1 N.141 13636 aten::Int pnnx_14005 1 1 N.141 13637 aten::size pnnx_14006 2 1 x8.8 13612 13638 #x8.8=(36,64,192)f32 prim::NumToTensor pnnx_14007 1 1 13638 C.289 aten::Int pnnx_14008 1 1 C.289 13640 nn.Linear layers_dfe.5.residual_group.blocks.3.attn.qkv 1 1 x8.8 13641 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x8.8=(36,64,192)f32 #13641=(36,64,576)f32 aten::div pnnx_14009 3 1 C.289 13609 13608 13642 aten::Int pnnx_14010 1 1 13642 13643 prim::ListConstruct pnnx_14011 5 1 13629 13637 13613 13614 13643 13644 prim::Constant pnnx_14013 0 1 24434 value=2 prim::Constant pnnx_14014 0 1 24435 value=0 prim::Constant pnnx_14015 0 1 24436 value=3 prim::Constant pnnx_14016 0 1 24437 value=1 prim::ListConstruct pnnx_14017 5 1 24434 24435 24436 24437 13615 13646 Tensor.reshape Tensor.reshape_572 2 1 13641 13644 13645 $input=13641 $shape=13644 #13641=(36,64,576)f32 #13645=(36,64,3,6,32)f32 prim::Constant pnnx_14019 0 1 24438 value=0 prim::Constant pnnx_14020 0 1 24439 value=0 prim::Constant pnnx_14022 0 1 24440 value=0 prim::Constant pnnx_14023 0 1 24441 value=1 prim::Constant pnnx_14025 0 1 24442 value=0 prim::Constant pnnx_14026 0 1 24443 value=2 torch.permute torch.permute_2805 2 1 13645 13646 qkv1.8 $input=13645 $dims=13646 #13645=(36,64,3,6,32)f32 #qkv1.8=(3,36,6,64,32)f32 Tensor.select Tensor.select_857 3 1 qkv1.8 24438 24439 q.141 $input=qkv1.8 $dim=24438 $index=24439 #qkv1.8=(3,36,6,64,32)f32 #q.141=(36,6,64,32)f32 aten::mul pnnx_14028 2 1 q.141 13607 q1.8 #q.141=(36,6,64,32)f32 #q1.8=(36,6,64,32)f32 Tensor.select Tensor.select_858 3 1 qkv1.8 24440 24441 k.141 $input=qkv1.8 $dim=24440 $index=24441 #qkv1.8=(3,36,6,64,32)f32 #k.141=(36,6,64,32)f32 prim::Constant pnnx_14031 0 1 24444 value=-1 prim::ListConstruct pnnx_14032 1 1 24444 13654 Tensor.view 
Tensor.view_1748 2 1 relative_position_index.141 13654 13655 $input=relative_position_index.141 $shape=13654 #relative_position_index.141=(64,64)i64 #13655=(4096)i64 prim::ListConstruct pnnx_14034 1 1 13655 13656 #13655=(4096)i64 prim::Constant pnnx_14036 0 1 24445 value=64 prim::Constant pnnx_14037 0 1 24446 value=-1 prim::ListConstruct pnnx_14038 3 1 13618 24445 24446 13658 Tensor.index Tensor.index_395 2 1 relative_position_bias_table.141 13656 13657 $input=relative_position_bias_table.141 $expr=13656 #relative_position_bias_table.141=(225,6)f32 #13657=(4096,6)f32 prim::Constant pnnx_14040 0 1 24447 value=2 prim::Constant pnnx_14041 0 1 24448 value=0 prim::Constant pnnx_14042 0 1 24449 value=1 prim::ListConstruct pnnx_14043 3 1 24447 24448 24449 13660 Tensor.view Tensor.view_1749 2 1 13657 13658 relative_position_bias.141 $input=13657 $shape=13658 #13657=(4096,6)f32 #relative_position_bias.141=(64,64,6)f32 prim::Constant pnnx_14047 0 1 24451 value=0 torch.permute torch.permute_2806 2 1 relative_position_bias.141 13660 13661 $input=relative_position_bias.141 $dims=13660 #relative_position_bias.141=(64,64,6)f32 #13661=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_210 1 1 13661 relative_position_bias1.8 memory_format=torch.contiguous_format $input=13661 #13661=(6,64,64)f32 #relative_position_bias1.8=(6,64,64)f32 prim::Constant pnnx_14049 0 1 24452 value=1 torch.transpose torch.transpose_3123 3 1 k.141 13616 13617 13652 $input=k.141 $dim0=13616 $dim1=13617 #k.141=(36,6,64,32)f32 #13652=(36,6,32,64)f32 torch.matmul torch.matmul_2342 2 1 q1.8 13652 attn.283 $input=q1.8 $other=13652 #q1.8=(36,6,64,32)f32 #13652=(36,6,32,64)f32 #attn.283=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3355 2 1 relative_position_bias1.8 24451 13663 $input=relative_position_bias1.8 $dim=24451 #relative_position_bias1.8=(6,64,64)f32 #13663=(1,6,64,64)f32 aten::add pnnx_14050 3 1 attn.283 13663 24452 attn2.4 #attn.283=(36,6,64,64)f32 #13663=(1,6,64,64)f32 #attn2.4=(36,6,64,64)f32 prim::Constant pnnx_14051 0 1 24453 value=0 aten::size pnnx_14052 2 1 attn_mask.71 24453 13665 #attn_mask.71=(36,64,64)f32 prim::NumToTensor pnnx_14053 1 1 13665 other.71 aten::Int pnnx_14054 1 1 other.71 13667 prim::Constant pnnx_14055 0 1 24454 value=trunc aten::div pnnx_14056 3 1 B_.141 other.71 24454 13668 aten::Int pnnx_14057 1 1 13668 13669 prim::Constant pnnx_14058 0 1 24455 value=6 prim::ListConstruct pnnx_14059 5 1 13669 13667 24455 13636 13635 13670 prim::Constant pnnx_14061 0 1 24456 value=1 prim::Constant pnnx_14063 0 1 24457 value=0 prim::Constant pnnx_14065 0 1 24458 value=1 Tensor.view Tensor.view_1750 2 1 attn2.4 13670 13671 $input=attn2.4 $shape=13670 #attn2.4=(36,6,64,64)f32 #13671=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3356 2 1 attn_mask.71 24456 13672 $input=attn_mask.71 $dim=24456 #attn_mask.71=(36,64,64)f32 #13672=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3357 2 1 13672 24457 13673 $input=13672 $dim=24457 #13672=(36,1,64,64)f32 #13673=(1,36,1,64,64)f32 aten::add pnnx_14066 3 1 13671 13673 24458 attn3.4 #13671=(1,36,6,64,64)f32 #13673=(1,36,1,64,64)f32 #attn3.4=(1,36,6,64,64)f32 prim::Constant pnnx_14067 0 1 24459 value=-1 prim::Constant pnnx_14068 0 1 24460 value=6 prim::ListConstruct pnnx_14069 4 1 24459 24460 13634 13633 13675 Tensor.view Tensor.view_1751 2 1 attn3.4 13675 input.319 $input=attn3.4 $shape=13675 #attn3.4=(1,36,6,64,64)f32 #input.319=(36,6,64,64)f32 nn.Softmax layers_dfe.5.residual_group.blocks.3.attn.softmax 1 1 input.319 13677 dim=-1 #input.319=(36,6,64,64)f32 
#13677=(36,6,64,64)f32 nn.Dropout layers_dfe.5.residual_group.blocks.3.attn.attn_drop 1 1 13677 13678 #13677=(36,6,64,64)f32 #13678=(36,6,64,64)f32 Tensor.select Tensor.select_859 3 1 qkv1.8 24442 24443 v.141 $input=qkv1.8 $dim=24442 $index=24443 #qkv1.8=(3,36,6,64,32)f32 #v.141=(36,6,64,32)f32 prim::Constant pnnx_14072 0 1 24461 value=1 prim::Constant pnnx_14073 0 1 24462 value=2 torch.matmul torch.matmul_2343 2 1 13678 v.141 13679 $input=13678 $other=v.141 #13678=(36,6,64,64)f32 #v.141=(36,6,64,32)f32 #13679=(36,6,64,32)f32 prim::ListConstruct pnnx_14075 3 1 13628 13632 13640 13681 torch.transpose torch.transpose_3124 3 1 13679 24461 24462 13680 $input=13679 $dim0=24461 $dim1=24462 #13679=(36,6,64,32)f32 #13680=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_573 2 1 13680 13681 input1.8 $input=13680 $shape=13681 #13680=(36,64,6,32)f32 #input1.8=(36,64,192)f32 nn.Linear layers_dfe.5.residual_group.blocks.3.attn.proj 1 1 input1.8 13683 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.8=(36,64,192)f32 #13683=(36,64,192)f32 nn.Dropout layers_dfe.5.residual_group.blocks.3.attn.proj_drop 1 1 13683 13684 #13683=(36,64,192)f32 #13684=(36,64,192)f32 prim::Constant pnnx_14077 0 1 24463 value=-1 prim::Constant pnnx_14078 0 1 24464 value=8 prim::Constant pnnx_14079 0 1 24465 value=8 prim::ListConstruct pnnx_14080 4 1 24463 24464 24465 13574 13685 prim::Constant pnnx_14082 0 1 24466 value=8 prim::Constant pnnx_14083 0 1 24467 value=trunc aten::div pnnx_14084 3 1 H0.1 24466 24467 13687 aten::Int pnnx_14085 1 1 13687 13688 prim::Constant pnnx_14086 0 1 24468 value=8 prim::Constant pnnx_14087 0 1 24469 value=trunc aten::div pnnx_14088 3 1 W0.1 24468 24469 13689 aten::Int pnnx_14089 1 1 13689 13690 prim::Constant pnnx_14090 0 1 24470 value=1 prim::Constant pnnx_14091 0 1 24471 value=8 prim::Constant pnnx_14092 0 1 24472 value=8 prim::Constant pnnx_14093 0 1 24473 value=-1 prim::ListConstruct pnnx_14094 6 1 24470 13688 13690 24471 24472 24473 13691 prim::Constant pnnx_14096 0 1 24474 value=0 prim::Constant pnnx_14097 0 1 24475 value=1 prim::Constant pnnx_14098 0 1 24476 value=3 prim::Constant pnnx_14099 0 1 24477 value=2 prim::Constant pnnx_14100 0 1 24478 value=4 prim::Constant pnnx_14101 0 1 24479 value=5 prim::ListConstruct pnnx_14102 6 1 24474 24475 24476 24477 24478 24479 13693 Tensor.view Tensor.view_1752 2 1 13684 13685 windows.141 $input=13684 $shape=13685 #13684=(36,64,192)f32 #windows.141=(36,8,8,192)f32 Tensor.view Tensor.view_1753 2 1 windows.141 13691 x9.8 $input=windows.141 $shape=13691 #windows.141=(36,8,8,192)f32 #x9.8=(1,6,6,8,8,192)f32 prim::Constant pnnx_14106 0 1 24481 value=1 prim::Constant pnnx_14107 0 1 24482 value=-1 prim::ListConstruct pnnx_14108 4 1 24481 866 1106 24482 13696 torch.permute torch.permute_2807 2 1 x9.8 13693 13694 $input=x9.8 $dims=13693 #x9.8=(1,6,6,8,8,192)f32 #13694=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_211 1 1 13694 13695 memory_format=torch.contiguous_format $input=13694 #13694=(1,6,8,6,8,192)f32 #13695=(1,6,8,6,8,192)f32 prim::Constant pnnx_14110 0 1 24483 value=4 prim::Constant pnnx_14111 0 1 24484 value=4 prim::ListConstruct pnnx_14112 2 1 24483 24484 13698 prim::Constant pnnx_14113 0 1 24485 value=1 prim::Constant pnnx_14114 0 1 24486 value=2 prim::ListConstruct pnnx_14115 2 1 24485 24486 13699 Tensor.view Tensor.view_1754 2 1 13695 13696 shifted_x.71 $input=13695 $shape=13696 #13695=(1,6,8,6,8,192)f32 #shifted_x.71=(1,48,48,192)f32 aten::mul pnnx_14117 2 1 H0.1 W0.1 13701 aten::Int pnnx_14118 1 1 13701 
13702 prim::ListConstruct pnnx_14119 3 1 13569 13702 13573 13703 prim::Constant pnnx_14121 0 1 13705 value=None prim::Constant pnnx_14122 0 1 24487 value=1 torch.roll torch.roll_2489 3 1 shifted_x.71 13698 13699 x10.4 $input=shifted_x.71 $shifts=13698 $dims=13699 #shifted_x.71=(1,48,48,192)f32 #x10.4=(1,48,48,192)f32 Tensor.view Tensor.view_1755 2 1 x10.4 13703 x11.4 $input=x10.4 $shape=13703 #x10.4=(1,48,48,192)f32 #x11.4=(1,2304,192)f32 aten::add pnnx_14123 3 1 13548 x11.4 24487 input.321 #13548=(1,2304,192)f32 #x11.4=(1,2304,192)f32 #input.321=(1,2304,192)f32 nn.LayerNorm layers_dfe.5.residual_group.blocks.3.norm2 1 1 input.321 13707 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.321=(1,2304,192)f32 #13707=(1,2304,192)f32 nn.Linear layers_dfe.5.residual_group.blocks.3.mlp.fc1 1 1 13707 13712 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #13707=(1,2304,192)f32 #13712=(1,2304,384)f32 nn.GELU layers_dfe.5.residual_group.blocks.3.mlp.act 1 1 13712 13713 #13712=(1,2304,384)f32 #13713=(1,2304,384)f32 nn.Dropout layers_dfe.5.residual_group.blocks.3.mlp.drop 1 1 13713 13714 #13713=(1,2304,384)f32 #13714=(1,2304,384)f32 nn.Linear layers_dfe.5.residual_group.blocks.3.mlp.fc2 1 1 13714 13715 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #13714=(1,2304,384)f32 #13715=(1,2304,192)f32 nn.Dropout layers_dfe.5.residual_group.blocks.3.mlp.drop 1 1 13715 13716 #13715=(1,2304,192)f32 #13716=(1,2304,192)f32 prim::Constant pnnx_14124 0 1 13717 value=None prim::Constant pnnx_14125 0 1 24488 value=1 aten::add pnnx_14126 3 1 input.321 13716 24488 13718 #input.321=(1,2304,192)f32 #13716=(1,2304,192)f32 #13718=(1,2304,192)f32 prim::Constant pnnx_14127 0 1 13719 value=trunc prim::Constant pnnx_14128 0 1 13720 value=8 prim::Constant pnnx_14129 0 1 13721 value=0 prim::Constant pnnx_14130 0 1 13722 value=2 prim::Constant pnnx_14131 0 1 13723 value=1 prim::Constant pnnx_14132 0 1 13724 value=3 prim::Constant pnnx_14133 0 1 13725 value=8 prim::Constant pnnx_14134 0 1 13726 value=4 prim::Constant pnnx_14135 0 1 13727 value=5 prim::Constant pnnx_14136 0 1 13728 value=-1 prim::Constant pnnx_14137 0 1 13729 value=64 aten::size pnnx_14138 2 1 13718 13721 13735 #13718=(1,2304,192)f32 prim::NumToTensor pnnx_14139 1 1 13735 B.171 aten::Int pnnx_14140 1 1 B.171 13737 aten::Int pnnx_14141 1 1 B.171 13738 aten::size pnnx_14142 2 1 13718 13722 13739 #13718=(1,2304,192)f32 prim::NumToTensor pnnx_14143 1 1 13739 C.291 aten::Int pnnx_14144 1 1 C.291 13741 aten::Int pnnx_14145 1 1 C.291 13742 aten::Int pnnx_14146 1 1 C.291 13743 aten::Int pnnx_14147 1 1 C.291 13744 nn.LayerNorm layers_dfe.5.residual_group.blocks.4.norm1 1 1 13718 13745 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #13718=(1,2304,192)f32 #13745=(1,2304,192)f32 prim::ListConstruct pnnx_14148 4 1 13738 863 1103 13744 13746 prim::Constant pnnx_14150 0 1 24489 value=0 Tensor.view Tensor.view_1756 2 1 13745 13746 x.143 $input=13745 $shape=13746 #13745=(1,2304,192)f32 #x.143=(1,48,48,192)f32 aten::size pnnx_14151 2 1 x.143 24489 13748 #x.143=(1,48,48,192)f32 prim::NumToTensor pnnx_14152 1 1 13748 B1.10 aten::Int pnnx_14153 1 1 B1.10 13750 aten::size pnnx_14154 2 1 x.143 13723 13751 #x.143=(1,48,48,192)f32 prim::NumToTensor pnnx_14155 1 1 13751 13752 prim::Constant pnnx_14156 0 1 24490 value=2 aten::size pnnx_14157 2 1 x.143 24490 13753 #x.143=(1,48,48,192)f32 prim::NumToTensor pnnx_14158 1 1 13753 13754 
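In the shifted blocks just above (blocks.3, and blocks.5 below), the Tensor.view / torch.unsqueeze pair around the attn_mask attribute implements the usual masked-attention add: the (nW, N, N) mask is broadcast over batch and heads before the softmax. A sketch with the traced sizes (nW = 36 windows, 6 heads, N = 64 tokens per window); the random tensors stand in for the real logits and the stored attn_mask.

import torch

nW, heads, N = 36, 6, 64
B_ = nW * 1                            # windows for a batch of one image
attn = torch.randn(B_, heads, N, N)    # attention logits (attn2.x in the trace)
mask = torch.randn(nW, N, N)           # per-window attn_mask attribute

attn = attn.view(B_ // nW, nW, heads, N, N) + mask.unsqueeze(1).unsqueeze(0)
attn = attn.view(-1, heads, N, N)      # back to (36, 6, 64, 64)
probs = torch.softmax(attn, dim=-1)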
aten::size pnnx_14159 2 1 x.143 13724 13755 #x.143=(1,48,48,192)f32 prim::NumToTensor pnnx_14160 1 1 13755 C1.10 aten::Int pnnx_14161 1 1 C1.10 13757 aten::Int pnnx_14162 1 1 C1.10 13758 aten::div pnnx_14163 3 1 13752 13720 13719 13759 aten::Int pnnx_14164 1 1 13759 13760 prim::Constant pnnx_14165 0 1 24491 value=8 prim::Constant pnnx_14166 0 1 24492 value=trunc aten::div pnnx_14167 3 1 13754 24491 24492 13761 aten::Int pnnx_14168 1 1 13761 13762 prim::Constant pnnx_14169 0 1 24493 value=8 prim::ListConstruct pnnx_14170 6 1 13750 13760 13725 13762 24493 13758 13763 prim::Constant pnnx_14172 0 1 24494 value=0 prim::Constant pnnx_14173 0 1 24495 value=1 prim::Constant pnnx_14174 0 1 24496 value=3 prim::Constant pnnx_14175 0 1 24497 value=2 prim::ListConstruct pnnx_14176 6 1 24494 24495 24496 24497 13726 13727 13765 Tensor.view Tensor.view_1757 2 1 x.143 13763 x5.73 $input=x.143 $shape=13763 #x.143=(1,48,48,192)f32 #x5.73=(1,6,8,6,8,192)f32 prim::Constant pnnx_14180 0 1 24499 value=8 prim::Constant pnnx_14181 0 1 24500 value=8 prim::ListConstruct pnnx_14182 4 1 13728 24499 24500 13757 13768 torch.permute torch.permute_2808 2 1 x5.73 13765 13766 $input=x5.73 $dims=13765 #x5.73=(1,6,8,6,8,192)f32 #13766=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_212 1 1 13766 13767 memory_format=torch.contiguous_format $input=13766 #13766=(1,6,6,8,8,192)f32 #13767=(1,6,6,8,8,192)f32 prim::Constant pnnx_14184 0 1 24501 value=-1 prim::ListConstruct pnnx_14185 3 1 24501 13729 13743 13770 prim::Constant pnnx_14187 0 1 13772 value=1.767767e-01 prim::Constant pnnx_14188 0 1 13773 value=trunc prim::Constant pnnx_14189 0 1 13774 value=6 prim::Constant pnnx_14190 0 1 13775 value=0 prim::Constant pnnx_14191 0 1 13776 value=1 prim::Constant pnnx_14192 0 1 13777 value=2 prim::Constant pnnx_14193 0 1 13778 value=3 prim::Constant pnnx_14194 0 1 13779 value=6 prim::Constant pnnx_14195 0 1 13780 value=4 prim::Constant pnnx_14196 0 1 13781 value=-2 prim::Constant pnnx_14197 0 1 13782 value=-1 prim::Constant pnnx_14198 0 1 13783 value=64 pnnx.Attribute layers_dfe.5.residual_group.blocks.4.attn 0 1 relative_position_bias_table.143 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.143=(225,6)f32 pnnx.Attribute layers_dfe.5.residual_group.blocks.4.attn 0 1 relative_position_index.143 @relative_position_index=(64,64)i64 #relative_position_index.143=(64,64)i64 Tensor.view Tensor.view_1758 2 1 13767 13768 x_windows.143 $input=13767 $shape=13768 #13767=(1,6,6,8,8,192)f32 #x_windows.143=(36,8,8,192)f32 Tensor.view Tensor.view_1759 2 1 x_windows.143 13770 x6.10 $input=x_windows.143 $shape=13770 #x_windows.143=(36,8,8,192)f32 #x6.10=(36,64,192)f32 aten::size pnnx_14199 2 1 x6.10 13775 13791 #x6.10=(36,64,192)f32 prim::NumToTensor pnnx_14200 1 1 13791 B_.143 aten::Int pnnx_14201 1 1 B_.143 13793 aten::Int pnnx_14202 1 1 B_.143 13794 aten::size pnnx_14203 2 1 x6.10 13776 13795 #x6.10=(36,64,192)f32 prim::NumToTensor pnnx_14204 1 1 13795 N.143 aten::Int pnnx_14205 1 1 N.143 13797 aten::Int pnnx_14206 1 1 N.143 13798 aten::size pnnx_14207 2 1 x6.10 13777 13799 #x6.10=(36,64,192)f32 prim::NumToTensor pnnx_14208 1 1 13799 C.293 aten::Int pnnx_14209 1 1 C.293 13801 nn.Linear layers_dfe.5.residual_group.blocks.4.attn.qkv 1 1 x6.10 13802 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x6.10=(36,64,192)f32 #13802=(36,64,576)f32 aten::div pnnx_14210 3 1 C.293 13774 13773 13803 aten::Int pnnx_14211 1 1 13803 13804 prim::ListConstruct pnnx_14212 5 1 13794 13798 13778 13779 13804 13805 
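The Tensor.reshape / torch.permute / Tensor.select pattern around each .attn.qkv linear unpacks a fused QKV projection into per-head q, k, v, and the 1.767767e-01 constant is 1/sqrt(head_dim) with head_dim = 192 / 6 = 32. A sketch of that unpacking with the traced (36, 64, 192) window tokens; the qkv_proj layer here is freshly initialized, not the trained weights.

import torch
import torch.nn as nn

B_, N, C, heads = 36, 64, 192, 6
head_dim = C // heads                       # 32
scale = head_dim ** -0.5                    # 0.17677669..., the traced constant

qkv_proj = nn.Linear(C, 3 * C, bias=True)   # the .attn.qkv layer (192 -> 576)
x = torch.randn(B_, N, C)

qkv = qkv_proj(x).reshape(B_, N, 3, heads, head_dim).permute(2, 0, 3, 1, 4)
q, k, v = qkv[0], qkv[1], qkv[2]            # each (36, 6, 64, 32)
q = q * scale                               # matches aten::mul with 1.767767e-01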
prim::Constant pnnx_14214 0 1 24502 value=2 prim::Constant pnnx_14215 0 1 24503 value=0 prim::Constant pnnx_14216 0 1 24504 value=3 prim::Constant pnnx_14217 0 1 24505 value=1 prim::ListConstruct pnnx_14218 5 1 24502 24503 24504 24505 13780 13807 Tensor.reshape Tensor.reshape_574 2 1 13802 13805 13806 $input=13802 $shape=13805 #13802=(36,64,576)f32 #13806=(36,64,3,6,32)f32 prim::Constant pnnx_14220 0 1 24506 value=0 prim::Constant pnnx_14221 0 1 24507 value=0 prim::Constant pnnx_14223 0 1 24508 value=0 prim::Constant pnnx_14224 0 1 24509 value=1 prim::Constant pnnx_14226 0 1 24510 value=0 prim::Constant pnnx_14227 0 1 24511 value=2 torch.permute torch.permute_2809 2 1 13806 13807 qkv1.10 $input=13806 $dims=13807 #13806=(36,64,3,6,32)f32 #qkv1.10=(3,36,6,64,32)f32 Tensor.select Tensor.select_860 3 1 qkv1.10 24506 24507 q.143 $input=qkv1.10 $dim=24506 $index=24507 #qkv1.10=(3,36,6,64,32)f32 #q.143=(36,6,64,32)f32 aten::mul pnnx_14229 2 1 q.143 13772 q1.10 #q.143=(36,6,64,32)f32 #q1.10=(36,6,64,32)f32 Tensor.select Tensor.select_861 3 1 qkv1.10 24508 24509 k.143 $input=qkv1.10 $dim=24508 $index=24509 #qkv1.10=(3,36,6,64,32)f32 #k.143=(36,6,64,32)f32 prim::Constant pnnx_14232 0 1 24512 value=-1 prim::ListConstruct pnnx_14233 1 1 24512 13815 Tensor.view Tensor.view_1760 2 1 relative_position_index.143 13815 13816 $input=relative_position_index.143 $shape=13815 #relative_position_index.143=(64,64)i64 #13816=(4096)i64 prim::ListConstruct pnnx_14235 1 1 13816 13817 #13816=(4096)i64 prim::Constant pnnx_14237 0 1 24513 value=64 prim::Constant pnnx_14238 0 1 24514 value=-1 prim::ListConstruct pnnx_14239 3 1 13783 24513 24514 13819 Tensor.index Tensor.index_396 2 1 relative_position_bias_table.143 13817 13818 $input=relative_position_bias_table.143 $expr=13817 #relative_position_bias_table.143=(225,6)f32 #13818=(4096,6)f32 prim::Constant pnnx_14241 0 1 24515 value=2 prim::Constant pnnx_14242 0 1 24516 value=0 prim::Constant pnnx_14243 0 1 24517 value=1 prim::ListConstruct pnnx_14244 3 1 24515 24516 24517 13821 Tensor.view Tensor.view_1761 2 1 13818 13819 relative_position_bias.143 $input=13818 $shape=13819 #13818=(4096,6)f32 #relative_position_bias.143=(64,64,6)f32 prim::Constant pnnx_14248 0 1 24519 value=0 torch.permute torch.permute_2810 2 1 relative_position_bias.143 13821 13822 $input=relative_position_bias.143 $dims=13821 #relative_position_bias.143=(64,64,6)f32 #13822=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_213 1 1 13822 relative_position_bias1.10 memory_format=torch.contiguous_format $input=13822 #13822=(6,64,64)f32 #relative_position_bias1.10=(6,64,64)f32 prim::Constant pnnx_14250 0 1 24520 value=1 torch.transpose torch.transpose_3125 3 1 k.143 13781 13782 13813 $input=k.143 $dim0=13781 $dim1=13782 #k.143=(36,6,64,32)f32 #13813=(36,6,32,64)f32 torch.matmul torch.matmul_2344 2 1 q1.10 13813 attn.287 $input=q1.10 $other=13813 #q1.10=(36,6,64,32)f32 #13813=(36,6,32,64)f32 #attn.287=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3358 2 1 relative_position_bias1.10 24519 13824 $input=relative_position_bias1.10 $dim=24519 #relative_position_bias1.10=(6,64,64)f32 #13824=(1,6,64,64)f32 aten::add pnnx_14251 3 1 attn.287 13824 24520 input.323 #attn.287=(36,6,64,64)f32 #13824=(1,6,64,64)f32 #input.323=(36,6,64,64)f32 nn.Softmax layers_dfe.5.residual_group.blocks.4.attn.softmax 1 1 input.323 13826 dim=-1 #input.323=(36,6,64,64)f32 #13826=(36,6,64,64)f32 nn.Dropout layers_dfe.5.residual_group.blocks.4.attn.attn_drop 1 1 13826 13827 #13826=(36,6,64,64)f32 #13827=(36,6,64,64)f32 Tensor.select 
Tensor.select_862 3 1 qkv1.10 24510 24511 v.143 $input=qkv1.10 $dim=24510 $index=24511 #qkv1.10=(3,36,6,64,32)f32 #v.143=(36,6,64,32)f32 prim::Constant pnnx_14253 0 1 24521 value=1 prim::Constant pnnx_14254 0 1 24522 value=2 torch.matmul torch.matmul_2345 2 1 13827 v.143 13828 $input=13827 $other=v.143 #13827=(36,6,64,64)f32 #v.143=(36,6,64,32)f32 #13828=(36,6,64,32)f32 prim::ListConstruct pnnx_14256 3 1 13793 13797 13801 13830 torch.transpose torch.transpose_3126 3 1 13828 24521 24522 13829 $input=13828 $dim0=24521 $dim1=24522 #13828=(36,6,64,32)f32 #13829=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_575 2 1 13829 13830 input1.10 $input=13829 $shape=13830 #13829=(36,64,6,32)f32 #input1.10=(36,64,192)f32 nn.Linear layers_dfe.5.residual_group.blocks.4.attn.proj 1 1 input1.10 13832 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.10=(36,64,192)f32 #13832=(36,64,192)f32 nn.Dropout layers_dfe.5.residual_group.blocks.4.attn.proj_drop 1 1 13832 13833 #13832=(36,64,192)f32 #13833=(36,64,192)f32 prim::Constant pnnx_14258 0 1 24523 value=-1 prim::Constant pnnx_14259 0 1 24524 value=8 prim::Constant pnnx_14260 0 1 24525 value=8 prim::ListConstruct pnnx_14261 4 1 24523 24524 24525 13742 13834 prim::Constant pnnx_14263 0 1 24526 value=8 prim::Constant pnnx_14264 0 1 24527 value=trunc aten::div pnnx_14265 3 1 H0.1 24526 24527 13836 aten::Int pnnx_14266 1 1 13836 13837 prim::Constant pnnx_14267 0 1 24528 value=8 prim::Constant pnnx_14268 0 1 24529 value=trunc aten::div pnnx_14269 3 1 W0.1 24528 24529 13838 aten::Int pnnx_14270 1 1 13838 13839 prim::Constant pnnx_14271 0 1 24530 value=1 prim::Constant pnnx_14272 0 1 24531 value=8 prim::Constant pnnx_14273 0 1 24532 value=8 prim::Constant pnnx_14274 0 1 24533 value=-1 prim::ListConstruct pnnx_14275 6 1 24530 13837 13839 24531 24532 24533 13840 prim::Constant pnnx_14277 0 1 24534 value=0 prim::Constant pnnx_14278 0 1 24535 value=1 prim::Constant pnnx_14279 0 1 24536 value=3 prim::Constant pnnx_14280 0 1 24537 value=2 prim::Constant pnnx_14281 0 1 24538 value=4 prim::Constant pnnx_14282 0 1 24539 value=5 prim::ListConstruct pnnx_14283 6 1 24534 24535 24536 24537 24538 24539 13842 Tensor.view Tensor.view_1762 2 1 13833 13834 windows.143 $input=13833 $shape=13834 #13833=(36,64,192)f32 #windows.143=(36,8,8,192)f32 Tensor.view Tensor.view_1763 2 1 windows.143 13840 x7.10 $input=windows.143 $shape=13840 #windows.143=(36,8,8,192)f32 #x7.10=(1,6,6,8,8,192)f32 prim::Constant pnnx_14287 0 1 24541 value=1 prim::Constant pnnx_14288 0 1 24542 value=-1 prim::ListConstruct pnnx_14289 4 1 24541 860 1100 24542 13845 torch.permute torch.permute_2811 2 1 x7.10 13842 13843 $input=x7.10 $dims=13842 #x7.10=(1,6,6,8,8,192)f32 #13843=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_214 1 1 13843 13844 memory_format=torch.contiguous_format $input=13843 #13843=(1,6,8,6,8,192)f32 #13844=(1,6,8,6,8,192)f32 aten::mul pnnx_14291 2 1 H0.1 W0.1 13847 aten::Int pnnx_14292 1 1 13847 13848 prim::ListConstruct pnnx_14293 3 1 13737 13848 13741 13849 prim::Constant pnnx_14295 0 1 13851 value=None prim::Constant pnnx_14296 0 1 24543 value=1 Tensor.view Tensor.view_1764 2 1 13844 13845 x8.10 $input=13844 $shape=13845 #13844=(1,6,8,6,8,192)f32 #x8.10=(1,48,48,192)f32 Tensor.view Tensor.view_1765 2 1 x8.10 13849 x9.10 $input=x8.10 $shape=13849 #x8.10=(1,48,48,192)f32 #x9.10=(1,2304,192)f32 aten::add pnnx_14297 3 1 13718 x9.10 24543 input.325 #13718=(1,2304,192)f32 #x9.10=(1,2304,192)f32 #input.325=(1,2304,192)f32 nn.LayerNorm 
layers_dfe.5.residual_group.blocks.4.norm2 1 1 input.325 13853 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.325=(1,2304,192)f32 #13853=(1,2304,192)f32 nn.Linear layers_dfe.5.residual_group.blocks.4.mlp.fc1 1 1 13853 13858 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #13853=(1,2304,192)f32 #13858=(1,2304,384)f32 nn.GELU layers_dfe.5.residual_group.blocks.4.mlp.act 1 1 13858 13859 #13858=(1,2304,384)f32 #13859=(1,2304,384)f32 nn.Dropout layers_dfe.5.residual_group.blocks.4.mlp.drop 1 1 13859 13860 #13859=(1,2304,384)f32 #13860=(1,2304,384)f32 nn.Linear layers_dfe.5.residual_group.blocks.4.mlp.fc2 1 1 13860 13861 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #13860=(1,2304,384)f32 #13861=(1,2304,192)f32 nn.Dropout layers_dfe.5.residual_group.blocks.4.mlp.drop 1 1 13861 13862 #13861=(1,2304,192)f32 #13862=(1,2304,192)f32 prim::Constant pnnx_14298 0 1 13863 value=None prim::Constant pnnx_14299 0 1 24544 value=1 aten::add pnnx_14300 3 1 input.325 13862 24544 13864 #input.325=(1,2304,192)f32 #13862=(1,2304,192)f32 #13864=(1,2304,192)f32 prim::Constant pnnx_14301 0 1 13865 value=trunc prim::Constant pnnx_14302 0 1 13866 value=8 prim::Constant pnnx_14303 0 1 13867 value=0 prim::Constant pnnx_14304 0 1 13868 value=2 prim::Constant pnnx_14305 0 1 13869 value=-4 prim::Constant pnnx_14306 0 1 13870 value=1 prim::Constant pnnx_14307 0 1 13871 value=3 prim::Constant pnnx_14308 0 1 13872 value=8 prim::Constant pnnx_14309 0 1 13873 value=4 prim::Constant pnnx_14310 0 1 13874 value=5 prim::Constant pnnx_14311 0 1 13875 value=-1 prim::Constant pnnx_14312 0 1 13876 value=64 pnnx.Attribute layers_dfe.5.residual_group.blocks.5 0 1 attn_mask.73 @attn_mask=(36,64,64)f32 #attn_mask.73=(36,64,64)f32 aten::size pnnx_14313 2 1 13864 13867 13883 #13864=(1,2304,192)f32 prim::NumToTensor pnnx_14314 1 1 13883 B.173 aten::Int pnnx_14315 1 1 B.173 13885 aten::Int pnnx_14316 1 1 B.173 13886 aten::size pnnx_14317 2 1 13864 13868 13887 #13864=(1,2304,192)f32 prim::NumToTensor pnnx_14318 1 1 13887 C.295 aten::Int pnnx_14319 1 1 C.295 13889 aten::Int pnnx_14320 1 1 C.295 13890 aten::Int pnnx_14321 1 1 C.295 13891 aten::Int pnnx_14322 1 1 C.295 13892 nn.LayerNorm layers_dfe.5.residual_group.blocks.5.norm1 1 1 13864 13893 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #13864=(1,2304,192)f32 #13893=(1,2304,192)f32 prim::ListConstruct pnnx_14323 4 1 13886 857 1097 13892 13894 prim::Constant pnnx_14325 0 1 24545 value=-4 prim::ListConstruct pnnx_14326 2 1 13869 24545 13896 prim::Constant pnnx_14327 0 1 24546 value=2 prim::ListConstruct pnnx_14328 2 1 13870 24546 13897 Tensor.view Tensor.view_1766 2 1 13893 13894 x.145 $input=13893 $shape=13894 #13893=(1,2304,192)f32 #x.145=(1,48,48,192)f32 prim::Constant pnnx_14330 0 1 24547 value=0 torch.roll torch.roll_2490 3 1 x.145 13896 13897 x6.1 $input=x.145 $shifts=13896 $dims=13897 #x.145=(1,48,48,192)f32 #x6.1=(1,48,48,192)f32 aten::size pnnx_14331 2 1 x6.1 24547 13899 #x6.1=(1,48,48,192)f32 prim::NumToTensor pnnx_14332 1 1 13899 B1.1 aten::Int pnnx_14333 1 1 B1.1 13901 prim::Constant pnnx_14334 0 1 24548 value=1 aten::size pnnx_14335 2 1 x6.1 24548 13902 #x6.1=(1,48,48,192)f32 prim::NumToTensor pnnx_14336 1 1 13902 13903 prim::Constant pnnx_14337 0 1 24549 value=2 aten::size pnnx_14338 2 1 x6.1 24549 13904 #x6.1=(1,48,48,192)f32 prim::NumToTensor pnnx_14339 1 1 13904 13905 aten::size pnnx_14340 2 1 x6.1 13871 13906 
#x6.1=(1,48,48,192)f32 prim::NumToTensor pnnx_14341 1 1 13906 C1.1 aten::Int pnnx_14342 1 1 C1.1 13908 aten::Int pnnx_14343 1 1 C1.1 13909 aten::div pnnx_14344 3 1 13903 13866 13865 13910 aten::Int pnnx_14345 1 1 13910 13911 prim::Constant pnnx_14346 0 1 24550 value=8 prim::Constant pnnx_14347 0 1 24551 value=trunc aten::div pnnx_14348 3 1 13905 24550 24551 13912 aten::Int pnnx_14349 1 1 13912 13913 prim::Constant pnnx_14350 0 1 24552 value=8 prim::ListConstruct pnnx_14351 6 1 13901 13911 13872 13913 24552 13909 13914 prim::Constant pnnx_14353 0 1 24553 value=0 prim::Constant pnnx_14354 0 1 24554 value=1 prim::Constant pnnx_14355 0 1 24555 value=3 prim::Constant pnnx_14356 0 1 24556 value=2 prim::ListConstruct pnnx_14357 6 1 24553 24554 24555 24556 13873 13874 13916 Tensor.view Tensor.view_1767 2 1 x6.1 13914 x7.1 $input=x6.1 $shape=13914 #x6.1=(1,48,48,192)f32 #x7.1=(1,6,8,6,8,192)f32 prim::Constant pnnx_14361 0 1 24558 value=8 prim::Constant pnnx_14362 0 1 24559 value=8 prim::ListConstruct pnnx_14363 4 1 13875 24558 24559 13908 13919 torch.permute torch.permute_2812 2 1 x7.1 13916 13917 $input=x7.1 $dims=13916 #x7.1=(1,6,8,6,8,192)f32 #13917=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_215 1 1 13917 13918 memory_format=torch.contiguous_format $input=13917 #13917=(1,6,6,8,8,192)f32 #13918=(1,6,6,8,8,192)f32 prim::Constant pnnx_14365 0 1 24560 value=-1 prim::ListConstruct pnnx_14366 3 1 24560 13876 13891 13921 prim::Constant pnnx_14368 0 1 13923 value=1.767767e-01 prim::Constant pnnx_14369 0 1 13924 value=trunc prim::Constant pnnx_14370 0 1 13925 value=6 prim::Constant pnnx_14371 0 1 13926 value=0 prim::Constant pnnx_14372 0 1 13927 value=1 prim::Constant pnnx_14373 0 1 13928 value=2 prim::Constant pnnx_14374 0 1 13929 value=3 prim::Constant pnnx_14375 0 1 13930 value=6 prim::Constant pnnx_14376 0 1 13931 value=4 prim::Constant pnnx_14377 0 1 13932 value=-2 prim::Constant pnnx_14378 0 1 13933 value=-1 prim::Constant pnnx_14379 0 1 13934 value=64 pnnx.Attribute layers_dfe.5.residual_group.blocks.5.attn 0 1 relative_position_bias_table.145 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.145=(225,6)f32 pnnx.Attribute layers_dfe.5.residual_group.blocks.5.attn 0 1 relative_position_index.145 @relative_position_index=(64,64)i64 #relative_position_index.145=(64,64)i64 Tensor.view Tensor.view_1768 2 1 13918 13919 x_windows.145 $input=13918 $shape=13919 #13918=(1,6,6,8,8,192)f32 #x_windows.145=(36,8,8,192)f32 Tensor.view Tensor.view_1769 2 1 x_windows.145 13921 x8.1 $input=x_windows.145 $shape=13921 #x_windows.145=(36,8,8,192)f32 #x8.1=(36,64,192)f32 aten::size pnnx_14380 2 1 x8.1 13926 13942 #x8.1=(36,64,192)f32 prim::NumToTensor pnnx_14381 1 1 13942 B_.145 aten::Int pnnx_14382 1 1 B_.145 13944 aten::Int pnnx_14383 1 1 B_.145 13945 aten::size pnnx_14384 2 1 x8.1 13927 13946 #x8.1=(36,64,192)f32 prim::NumToTensor pnnx_14385 1 1 13946 N.145 aten::Int pnnx_14386 1 1 N.145 13948 aten::Int pnnx_14387 1 1 N.145 13949 aten::Int pnnx_14388 1 1 N.145 13950 aten::Int pnnx_14389 1 1 N.145 13951 aten::Int pnnx_14390 1 1 N.145 13952 aten::Int pnnx_14391 1 1 N.145 13953 aten::size pnnx_14392 2 1 x8.1 13928 13954 #x8.1=(36,64,192)f32 prim::NumToTensor pnnx_14393 1 1 13954 C.297 aten::Int pnnx_14394 1 1 C.297 13956 nn.Linear layers_dfe.5.residual_group.blocks.5.attn.qkv 1 1 x8.1 13957 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x8.1=(36,64,192)f32 #13957=(36,64,576)f32 aten::div pnnx_14395 3 1 C.297 13925 13924 13958 aten::Int pnnx_14396 1 1 
13958 13959 prim::ListConstruct pnnx_14397 5 1 13945 13953 13929 13930 13959 13960 prim::Constant pnnx_14399 0 1 24561 value=2 prim::Constant pnnx_14400 0 1 24562 value=0 prim::Constant pnnx_14401 0 1 24563 value=3 prim::Constant pnnx_14402 0 1 24564 value=1 prim::ListConstruct pnnx_14403 5 1 24561 24562 24563 24564 13931 13962 Tensor.reshape Tensor.reshape_576 2 1 13957 13960 13961 $input=13957 $shape=13960 #13957=(36,64,576)f32 #13961=(36,64,3,6,32)f32 prim::Constant pnnx_14405 0 1 24565 value=0 prim::Constant pnnx_14406 0 1 24566 value=0 prim::Constant pnnx_14408 0 1 24567 value=0 prim::Constant pnnx_14409 0 1 24568 value=1 prim::Constant pnnx_14411 0 1 24569 value=0 prim::Constant pnnx_14412 0 1 24570 value=2 torch.permute torch.permute_2813 2 1 13961 13962 qkv1.1 $input=13961 $dims=13962 #13961=(36,64,3,6,32)f32 #qkv1.1=(3,36,6,64,32)f32 Tensor.select Tensor.select_863 3 1 qkv1.1 24565 24566 q.145 $input=qkv1.1 $dim=24565 $index=24566 #qkv1.1=(3,36,6,64,32)f32 #q.145=(36,6,64,32)f32 aten::mul pnnx_14414 2 1 q.145 13923 q1.1 #q.145=(36,6,64,32)f32 #q1.1=(36,6,64,32)f32 Tensor.select Tensor.select_864 3 1 qkv1.1 24567 24568 k.145 $input=qkv1.1 $dim=24567 $index=24568 #qkv1.1=(3,36,6,64,32)f32 #k.145=(36,6,64,32)f32 prim::Constant pnnx_14417 0 1 24571 value=-1 prim::ListConstruct pnnx_14418 1 1 24571 13970 Tensor.view Tensor.view_1770 2 1 relative_position_index.145 13970 13971 $input=relative_position_index.145 $shape=13970 #relative_position_index.145=(64,64)i64 #13971=(4096)i64 prim::ListConstruct pnnx_14420 1 1 13971 13972 #13971=(4096)i64 prim::Constant pnnx_14422 0 1 24572 value=64 prim::Constant pnnx_14423 0 1 24573 value=-1 prim::ListConstruct pnnx_14424 3 1 13934 24572 24573 13974 Tensor.index Tensor.index_397 2 1 relative_position_bias_table.145 13972 13973 $input=relative_position_bias_table.145 $expr=13972 #relative_position_bias_table.145=(225,6)f32 #13973=(4096,6)f32 prim::Constant pnnx_14426 0 1 24574 value=2 prim::Constant pnnx_14427 0 1 24575 value=0 prim::Constant pnnx_14428 0 1 24576 value=1 prim::ListConstruct pnnx_14429 3 1 24574 24575 24576 13976 Tensor.view Tensor.view_1771 2 1 13973 13974 relative_position_bias.145 $input=13973 $shape=13974 #13973=(4096,6)f32 #relative_position_bias.145=(64,64,6)f32 prim::Constant pnnx_14433 0 1 24578 value=0 torch.permute torch.permute_2814 2 1 relative_position_bias.145 13976 13977 $input=relative_position_bias.145 $dims=13976 #relative_position_bias.145=(64,64,6)f32 #13977=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_216 1 1 13977 relative_position_bias1.1 memory_format=torch.contiguous_format $input=13977 #13977=(6,64,64)f32 #relative_position_bias1.1=(6,64,64)f32 prim::Constant pnnx_14435 0 1 24579 value=1 torch.transpose torch.transpose_3127 3 1 k.145 13932 13933 13968 $input=k.145 $dim0=13932 $dim1=13933 #k.145=(36,6,64,32)f32 #13968=(36,6,32,64)f32 torch.matmul torch.matmul_2346 2 1 q1.1 13968 attn.291 $input=q1.1 $other=13968 #q1.1=(36,6,64,32)f32 #13968=(36,6,32,64)f32 #attn.291=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3359 2 1 relative_position_bias1.1 24578 13979 $input=relative_position_bias1.1 $dim=24578 #relative_position_bias1.1=(6,64,64)f32 #13979=(1,6,64,64)f32 aten::add pnnx_14436 3 1 attn.291 13979 24579 attn2.1 #attn.291=(36,6,64,64)f32 #13979=(1,6,64,64)f32 #attn2.1=(36,6,64,64)f32 prim::Constant pnnx_14437 0 1 24580 value=0 aten::size pnnx_14438 2 1 attn_mask.73 24580 13981 #attn_mask.73=(36,64,64)f32 prim::NumToTensor pnnx_14439 1 1 13981 other.73 aten::Int pnnx_14440 1 1 other.73 13983 
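Tensor.index_397 and the surrounding view/permute ops decode the relative position bias: a (225, 6) table, i.e. (2·8−1)² relative offsets × 6 heads, is gathered with the flattened (64, 64) relative_position_index and broadcast onto the q·kᵀ logits. A sketch with random stand-ins for the learned table and stored index.

import torch

window, heads, N = 8, 6, 64
table = torch.randn((2 * window - 1) ** 2, heads)        # (225, 6) learned parameter
index = torch.randint(0, (2 * window - 1) ** 2, (N, N))  # (64, 64) relative_position_index

bias = table[index.view(-1)].view(N, N, heads)           # (4096, 6) -> (64, 64, 6)
bias = bias.permute(2, 0, 1).contiguous()                # (6, 64, 64)

attn = torch.randn(36, heads, N, N)                      # q @ k.transpose(-2, -1) logits
attn = attn + bias.unsqueeze(0)                          # broadcast over the 36 windows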
prim::Constant pnnx_14441 0 1 24581 value=trunc aten::div pnnx_14442 3 1 B_.145 other.73 24581 13984 aten::Int pnnx_14443 1 1 13984 13985 prim::Constant pnnx_14444 0 1 24582 value=6 prim::ListConstruct pnnx_14445 5 1 13985 13983 24582 13952 13951 13986 prim::Constant pnnx_14447 0 1 24583 value=1 prim::Constant pnnx_14449 0 1 24584 value=0 prim::Constant pnnx_14451 0 1 24585 value=1 Tensor.view Tensor.view_1772 2 1 attn2.1 13986 13987 $input=attn2.1 $shape=13986 #attn2.1=(36,6,64,64)f32 #13987=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3360 2 1 attn_mask.73 24583 13988 $input=attn_mask.73 $dim=24583 #attn_mask.73=(36,64,64)f32 #13988=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3361 2 1 13988 24584 13989 $input=13988 $dim=24584 #13988=(36,1,64,64)f32 #13989=(1,36,1,64,64)f32 aten::add pnnx_14452 3 1 13987 13989 24585 attn3.1 #13987=(1,36,6,64,64)f32 #13989=(1,36,1,64,64)f32 #attn3.1=(1,36,6,64,64)f32 prim::Constant pnnx_14453 0 1 24586 value=-1 prim::Constant pnnx_14454 0 1 24587 value=6 prim::ListConstruct pnnx_14455 4 1 24586 24587 13950 13949 13991 Tensor.view Tensor.view_1773 2 1 attn3.1 13991 input.327 $input=attn3.1 $shape=13991 #attn3.1=(1,36,6,64,64)f32 #input.327=(36,6,64,64)f32 nn.Softmax layers_dfe.5.residual_group.blocks.5.attn.softmax 1 1 input.327 13993 dim=-1 #input.327=(36,6,64,64)f32 #13993=(36,6,64,64)f32 nn.Dropout layers_dfe.5.residual_group.blocks.5.attn.attn_drop 1 1 13993 13994 #13993=(36,6,64,64)f32 #13994=(36,6,64,64)f32 Tensor.select Tensor.select_865 3 1 qkv1.1 24569 24570 v.145 $input=qkv1.1 $dim=24569 $index=24570 #qkv1.1=(3,36,6,64,32)f32 #v.145=(36,6,64,32)f32 prim::Constant pnnx_14458 0 1 24588 value=1 prim::Constant pnnx_14459 0 1 24589 value=2 torch.matmul torch.matmul_2347 2 1 13994 v.145 13995 $input=13994 $other=v.145 #13994=(36,6,64,64)f32 #v.145=(36,6,64,32)f32 #13995=(36,6,64,32)f32 prim::ListConstruct pnnx_14461 3 1 13944 13948 13956 13997 torch.transpose torch.transpose_3128 3 1 13995 24588 24589 13996 $input=13995 $dim0=24588 $dim1=24589 #13995=(36,6,64,32)f32 #13996=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_577 2 1 13996 13997 input1.1 $input=13996 $shape=13997 #13996=(36,64,6,32)f32 #input1.1=(36,64,192)f32 nn.Linear layers_dfe.5.residual_group.blocks.5.attn.proj 1 1 input1.1 13999 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input1.1=(36,64,192)f32 #13999=(36,64,192)f32 nn.Dropout layers_dfe.5.residual_group.blocks.5.attn.proj_drop 1 1 13999 14000 #13999=(36,64,192)f32 #14000=(36,64,192)f32 prim::Constant pnnx_14463 0 1 24590 value=-1 prim::Constant pnnx_14464 0 1 24591 value=8 prim::Constant pnnx_14465 0 1 24592 value=8 prim::ListConstruct pnnx_14466 4 1 24590 24591 24592 13890 14001 prim::Constant pnnx_14468 0 1 24593 value=8 prim::Constant pnnx_14469 0 1 24594 value=trunc aten::div pnnx_14470 3 1 H0.1 24593 24594 14003 aten::Int pnnx_14471 1 1 14003 14004 prim::Constant pnnx_14472 0 1 24595 value=8 prim::Constant pnnx_14473 0 1 24596 value=trunc aten::div pnnx_14474 3 1 W0.1 24595 24596 14005 aten::Int pnnx_14475 1 1 14005 14006 prim::Constant pnnx_14476 0 1 24597 value=1 prim::Constant pnnx_14477 0 1 24598 value=8 prim::Constant pnnx_14478 0 1 24599 value=8 prim::Constant pnnx_14479 0 1 24600 value=-1 prim::ListConstruct pnnx_14480 6 1 24597 14004 14006 24598 24599 24600 14007 prim::Constant pnnx_14482 0 1 24601 value=0 prim::Constant pnnx_14483 0 1 24602 value=1 prim::Constant pnnx_14484 0 1 24603 value=3 prim::Constant pnnx_14485 0 1 24604 value=2 prim::Constant pnnx_14486 0 1 24605 value=4 
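The matmul / transpose / reshape sequence feeding each .attn.proj linear is the standard head merge at the end of window attention. A sketch with the traced sizes, assuming probabilities and values shaped as in the steps above; the proj layer is illustrative rather than the trained one.

import torch
import torch.nn as nn

B_, heads, N, head_dim = 36, 6, 64, 32
C = heads * head_dim                                  # 192

probs = torch.softmax(torch.randn(B_, heads, N, N), dim=-1)
v = torch.randn(B_, heads, N, head_dim)
proj = nn.Linear(C, C, bias=True)                     # the .attn.proj layer

out = (probs @ v).transpose(1, 2).reshape(B_, N, C)   # (36, 64, 192)
out = proj(out)                                       # followed by proj_drop in the trace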
prim::Constant pnnx_14487 0 1 24606 value=5 prim::ListConstruct pnnx_14488 6 1 24601 24602 24603 24604 24605 24606 14009 Tensor.view Tensor.view_1774 2 1 14000 14001 windows.145 $input=14000 $shape=14001 #14000=(36,64,192)f32 #windows.145=(36,8,8,192)f32 Tensor.view Tensor.view_1775 2 1 windows.145 14007 x9.1 $input=windows.145 $shape=14007 #windows.145=(36,8,8,192)f32 #x9.1=(1,6,6,8,8,192)f32 prim::Constant pnnx_14492 0 1 24608 value=1 prim::Constant pnnx_14493 0 1 24609 value=-1 prim::ListConstruct pnnx_14494 4 1 24608 854 1094 24609 14012 torch.permute torch.permute_2815 2 1 x9.1 14009 14010 $input=x9.1 $dims=14009 #x9.1=(1,6,6,8,8,192)f32 #14010=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_217 1 1 14010 14011 memory_format=torch.contiguous_format $input=14010 #14010=(1,6,8,6,8,192)f32 #14011=(1,6,8,6,8,192)f32 prim::Constant pnnx_14496 0 1 24610 value=4 prim::Constant pnnx_14497 0 1 24611 value=4 prim::ListConstruct pnnx_14498 2 1 24610 24611 14014 prim::Constant pnnx_14499 0 1 24612 value=1 prim::Constant pnnx_14500 0 1 24613 value=2 prim::ListConstruct pnnx_14501 2 1 24612 24613 14015 Tensor.view Tensor.view_1776 2 1 14011 14012 shifted_x.73 $input=14011 $shape=14012 #14011=(1,6,8,6,8,192)f32 #shifted_x.73=(1,48,48,192)f32 aten::mul pnnx_14503 2 1 H0.1 W0.1 14017 aten::Int pnnx_14504 1 1 14017 14018 prim::ListConstruct pnnx_14505 3 1 13885 14018 13889 14019 prim::Constant pnnx_14507 0 1 14021 value=None prim::Constant pnnx_14508 0 1 24614 value=1 torch.roll torch.roll_2491 3 1 shifted_x.73 14014 14015 x10.1 $input=shifted_x.73 $shifts=14014 $dims=14015 #shifted_x.73=(1,48,48,192)f32 #x10.1=(1,48,48,192)f32 Tensor.view Tensor.view_1777 2 1 x10.1 14019 x11.1 $input=x10.1 $shape=14019 #x10.1=(1,48,48,192)f32 #x11.1=(1,2304,192)f32 aten::add pnnx_14509 3 1 13864 x11.1 24614 input.329 #13864=(1,2304,192)f32 #x11.1=(1,2304,192)f32 #input.329=(1,2304,192)f32 nn.LayerNorm layers_dfe.5.residual_group.blocks.5.norm2 1 1 input.329 14023 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.329=(1,2304,192)f32 #14023=(1,2304,192)f32 nn.Linear layers_dfe.5.residual_group.blocks.5.mlp.fc1 1 1 14023 14028 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #14023=(1,2304,192)f32 #14028=(1,2304,384)f32 nn.GELU layers_dfe.5.residual_group.blocks.5.mlp.act 1 1 14028 14029 #14028=(1,2304,384)f32 #14029=(1,2304,384)f32 nn.Dropout layers_dfe.5.residual_group.blocks.5.mlp.drop 1 1 14029 14030 #14029=(1,2304,384)f32 #14030=(1,2304,384)f32 nn.Linear layers_dfe.5.residual_group.blocks.5.mlp.fc2 1 1 14030 14031 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #14030=(1,2304,384)f32 #14031=(1,2304,192)f32 nn.Dropout layers_dfe.5.residual_group.blocks.5.mlp.drop 1 1 14031 14032 #14031=(1,2304,192)f32 #14032=(1,2304,192)f32 prim::Constant pnnx_14510 0 1 14033 value=None prim::Constant pnnx_14511 0 1 24615 value=1 aten::add pnnx_14512 3 1 input.329 14032 24615 14034 #input.329=(1,2304,192)f32 #14032=(1,2304,192)f32 #14034=(1,2304,192)f32 prim::Constant pnnx_14513 0 1 14035 value=0 prim::Constant pnnx_14514 0 1 14036 value=1 prim::Constant pnnx_14515 0 1 14037 value=2 prim::Constant pnnx_14516 0 1 14038 value=192 aten::size pnnx_14517 2 1 14034 14035 14039 #14034=(1,2304,192)f32 prim::NumToTensor pnnx_14518 1 1 14039 B.161 aten::Int pnnx_14519 1 1 B.161 14041 prim::ListConstruct pnnx_14521 4 1 14041 14038 851 1091 14043 torch.transpose torch.transpose_3129 3 1 14034 14036 14037 14042 $input=14034 
$dim0=14036 $dim1=14037 #14034=(1,2304,192)f32 #14042=(1,192,2304)f32 Tensor.view Tensor.view_1778 2 1 14042 14043 input.301 $input=14042 $shape=14043 #14042=(1,192,2304)f32 #input.301=(1,192,48,48)f32 nn.Conv2d layers_dfe.5.conv 1 1 input.301 14045 bias=True dilation=(1,1) groups=1 in_channels=192 kernel_size=(3,3) out_channels=192 padding=(1,1) padding_mode=zeros stride=(1,1) @bias=(192)f32 @weight=(192,192,3,3)f32 #input.301=(1,192,48,48)f32 #14045=(1,192,48,48)f32 prim::Constant pnnx_14523 0 1 14046 value=-1 prim::Constant pnnx_14524 0 1 14047 value=2 prim::Constant pnnx_14525 0 1 14048 value=1 prim::Constant pnnx_14527 0 1 24616 value=2 torch.flatten torch.flatten_2196 3 1 14045 14047 14046 14049 $input=14045 $start_dim=14047 $end_dim=14046 #14045=(1,192,48,48)f32 #14049=(1,192,2304)f32 torch.transpose torch.transpose_3130 3 1 14049 14048 24616 14050 $input=14049 $dim0=14048 $dim1=24616 #14049=(1,192,2304)f32 #14050=(1,2304,192)f32 aten::add pnnx_14529 3 1 14050 13069 13070 14051 #14050=(1,2304,192)f32 #13069=(1,2304,192)f32 #14051=(1,2304,192)f32 nn.LayerNorm norm_dfe 1 1 14051 1517 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #14051=(1,2304,192)f32 #1517=(1,2304,192)f32 prim::Constant pnnx_14530 0 1 14052 value=0 prim::Constant pnnx_14531 0 1 14053 value=1 prim::Constant pnnx_14532 0 1 14054 value=2 prim::Constant pnnx_14533 0 1 14055 value=192 aten::size pnnx_14534 2 1 1517 14052 14056 #1517=(1,2304,192)f32 prim::NumToTensor pnnx_14535 1 1 14056 B.175 aten::Int pnnx_14536 1 1 B.175 14058 prim::ListConstruct pnnx_14538 4 1 14058 14055 848 1088 14060 torch.transpose torch.transpose_3131 3 1 1517 14053 14054 14059 $input=1517 $dim0=14053 $dim1=14054 #1517=(1,2304,192)f32 #14059=(1,192,2304)f32 Tensor.view Tensor.view_1779 2 1 14059 14060 input.303 $input=14059 $shape=14060 #14059=(1,192,2304)f32 #input.303=(1,192,48,48)f32 nn.Conv2d conv_after_body_dfe 1 1 input.303 1523 bias=True dilation=(1,1) groups=1 in_channels=192 kernel_size=(3,3) out_channels=192 padding=(1,1) padding_mode=zeros stride=(1,1) @bias=(192)f32 @weight=(192,192,3,3)f32 #input.303=(1,192,48,48)f32 #1523=(1,192,48,48)f32 prim::Constant pnnx_14540 0 1 24617 value=1 aten::add pnnx_14541 3 1 1523 157 24617 x_b.1 #1523=(1,192,48,48)f32 #157=(1,192,48,48)f32 #x_b.1=(1,192,48,48)f32 prim::Constant pnnx_14542 0 1 14062 value=1.900000e+01 prim::Constant pnnx_14543 0 1 14063 value=1 aten::sub pnnx_14544 3 1 x_b.1 x_a.1 14063 input.305 #x_b.1=(1,192,48,48)f32 #x_a.1=(1,192,48,48)f32 #input.305=(1,192,48,48)f32 nn.ReflectionPad2d manipulator.convblks.0.pad1 1 1 input.305 14072 padding=(3,3,3,3) #input.305=(1,192,48,48)f32 #14072=(1,192,54,54)f32 nn.Conv2d manipulator.convblks.0.conv1 1 1 14072 14073 bias=False dilation=(1,1) groups=1 in_channels=192 kernel_size=(7,7) out_channels=192 padding=(0,0) padding_mode=zeros stride=(1,1) @weight=(192,192,7,7)f32 #14072=(1,192,54,54)f32 #14073=(1,192,48,48)f32 nn.ReLU manipulator.convblks.0.relu 1 1 14073 14074 #14073=(1,192,48,48)f32 #14074=(1,192,48,48)f32 aten::mul pnnx_14545 2 1 14074 14062 input0.77 #14074=(1,192,48,48)f32 #input0.77=(1,192,48,48)f32 nn.ReflectionPad2d manipulator.convblks_after.0.pad1 1 1 input0.77 14079 padding=(1,1,1,1) #input0.77=(1,192,48,48)f32 #14079=(1,192,50,50)f32 nn.Conv2d manipulator.convblks_after.0.conv1 1 1 14079 14080 bias=False dilation=(1,1) groups=1 in_channels=192 kernel_size=(3,3) out_channels=192 padding=(0,0) padding_mode=zeros stride=(1,1) @weight=(192,192,3,3)f32 #14079=(1,192,50,50)f32 
#14080=(1,192,48,48)f32 prim::Constant pnnx_14546 0 1 14082 value=1 nn.ReflectionPad2d manipulator.resblks.0.pad1 1 1 14080 14088 padding=(1,1,1,1) #14080=(1,192,48,48)f32 #14088=(1,192,50,50)f32 nn.Conv2d manipulator.resblks.0.conv1 1 1 14088 14089 bias=False dilation=(1,1) groups=1 in_channels=192 kernel_size=(3,3) out_channels=192 padding=(0,0) padding_mode=zeros stride=(1,1) @weight=(192,192,3,3)f32 #14088=(1,192,50,50)f32 #14089=(1,192,48,48)f32 nn.ReLU manipulator.resblks.0.relu 1 1 14089 14090 #14089=(1,192,48,48)f32 #14090=(1,192,48,48)f32 nn.ReflectionPad2d manipulator.resblks.0.pad2 1 1 14090 14091 padding=(1,1,1,1) #14090=(1,192,48,48)f32 #14091=(1,192,50,50)f32 nn.Conv2d manipulator.resblks.0.conv2 1 1 14091 14092 bias=False dilation=(1,1) groups=1 in_channels=192 kernel_size=(3,3) out_channels=192 padding=(0,0) padding_mode=zeros stride=(1,1) @weight=(192,192,3,3)f32 #14091=(1,192,50,50)f32 #14092=(1,192,48,48)f32 aten::add pnnx_14547 3 1 14092 14080 14082 diff.1 #14092=(1,192,48,48)f32 #14080=(1,192,48,48)f32 #diff.1=(1,192,48,48)f32 prim::Constant pnnx_14548 0 1 24618 value=1 aten::add pnnx_14549 3 1 x_b.1 diff.1 24618 14094 #x_b.1=(1,192,48,48)f32 #diff.1=(1,192,48,48)f32 #14094=(1,192,48,48)f32 prim::Constant pnnx_14550 0 1 24619 value=2 aten::size pnnx_14551 2 1 14094 24619 1533 #14094=(1,192,48,48)f32 prim::NumToTensor pnnx_14552 1 1 1533 H1.1 aten::Int pnnx_14553 1 1 H1.1 1537 aten::Int pnnx_14554 1 1 H1.1 1540 aten::Int pnnx_14555 1 1 H1.1 1543 aten::Int pnnx_14556 1 1 H1.1 1546 aten::Int pnnx_14557 1 1 H1.1 1549 aten::Int pnnx_14558 1 1 H1.1 1552 aten::Int pnnx_14559 1 1 H1.1 1555 aten::Int pnnx_14560 1 1 H1.1 1558 aten::Int pnnx_14561 1 1 H1.1 1561 aten::Int pnnx_14562 1 1 H1.1 1564 aten::Int pnnx_14563 1 1 H1.1 1567 aten::Int pnnx_14564 1 1 H1.1 1570 aten::Int pnnx_14565 1 1 H1.1 1573 aten::Int pnnx_14566 1 1 H1.1 1576 aten::Int pnnx_14567 1 1 H1.1 1579 aten::Int pnnx_14568 1 1 H1.1 1582 aten::Int pnnx_14569 1 1 H1.1 1585 aten::Int pnnx_14570 1 1 H1.1 1588 aten::Int pnnx_14571 1 1 H1.1 1591 aten::Int pnnx_14572 1 1 H1.1 1594 aten::Int pnnx_14573 1 1 H1.1 1597 aten::Int pnnx_14574 1 1 H1.1 1600 aten::Int pnnx_14575 1 1 H1.1 1603 aten::Int pnnx_14576 1 1 H1.1 1606 aten::Int pnnx_14577 1 1 H1.1 1609 aten::Int pnnx_14578 1 1 H1.1 1612 aten::Int pnnx_14579 1 1 H1.1 1615 aten::Int pnnx_14580 1 1 H1.1 1618 aten::Int pnnx_14581 1 1 H1.1 1621 aten::Int pnnx_14582 1 1 H1.1 1624 aten::Int pnnx_14583 1 1 H1.1 1627 aten::Int pnnx_14584 1 1 H1.1 1630 aten::Int pnnx_14585 1 1 H1.1 1633 aten::Int pnnx_14586 1 1 H1.1 1636 aten::Int pnnx_14587 1 1 H1.1 1639 aten::Int pnnx_14588 1 1 H1.1 1642 aten::Int pnnx_14589 1 1 H1.1 1645 aten::Int pnnx_14590 1 1 H1.1 1648 aten::Int pnnx_14591 1 1 H1.1 1651 aten::Int pnnx_14592 1 1 H1.1 1654 aten::Int pnnx_14593 1 1 H1.1 1657 aten::Int pnnx_14594 1 1 H1.1 1660 aten::Int pnnx_14595 1 1 H1.1 1663 aten::Int pnnx_14596 1 1 H1.1 1666 aten::Int pnnx_14597 1 1 H1.1 1669 aten::Int pnnx_14598 1 1 H1.1 1672 aten::Int pnnx_14599 1 1 H1.1 1675 aten::Int pnnx_14600 1 1 H1.1 1678 aten::Int pnnx_14601 1 1 H1.1 1681 aten::Int pnnx_14602 1 1 H1.1 1684 aten::Int pnnx_14603 1 1 H1.1 1687 aten::Int pnnx_14604 1 1 H1.1 1690 aten::Int pnnx_14605 1 1 H1.1 1693 aten::Int pnnx_14606 1 1 H1.1 1696 aten::Int pnnx_14607 1 1 H1.1 1699 aten::Int pnnx_14608 1 1 H1.1 1702 aten::Int pnnx_14609 1 1 H1.1 1705 aten::Int pnnx_14610 1 1 H1.1 1708 aten::Int pnnx_14611 1 1 H1.1 1711 aten::Int pnnx_14612 1 1 H1.1 1714 aten::Int pnnx_14613 1 1 H1.1 1717 aten::Int pnnx_14614 1 1 H1.1 1720 
aten::Int pnnx_14615 1 1 H1.1 1723 aten::Int pnnx_14616 1 1 H1.1 1726 aten::Int pnnx_14617 1 1 H1.1 1729 aten::Int pnnx_14618 1 1 H1.1 1732 aten::Int pnnx_14619 1 1 H1.1 1735 aten::Int pnnx_14620 1 1 H1.1 1738 aten::Int pnnx_14621 1 1 H1.1 1741 aten::Int pnnx_14622 1 1 H1.1 1744 aten::Int pnnx_14623 1 1 H1.1 1747 aten::Int pnnx_14624 1 1 H1.1 1750 aten::Int pnnx_14625 1 1 H1.1 1753 aten::Int pnnx_14626 1 1 H1.1 1756 aten::Int pnnx_14627 1 1 H1.1 1759 aten::Int pnnx_14628 1 1 H1.1 1762 aten::Int pnnx_14629 1 1 H1.1 1765 aten::Int pnnx_14630 1 1 H1.1 1768 aten::Int pnnx_14631 1 1 H1.1 1771 prim::Constant pnnx_14632 0 1 24620 value=3 aten::size pnnx_14633 2 1 14094 24620 1773 #14094=(1,192,48,48)f32 prim::NumToTensor pnnx_14634 1 1 1773 W1.1 aten::Int pnnx_14635 1 1 W1.1 1777 aten::Int pnnx_14636 1 1 W1.1 1780 aten::Int pnnx_14637 1 1 W1.1 1783 aten::Int pnnx_14638 1 1 W1.1 1786 aten::Int pnnx_14639 1 1 W1.1 1789 aten::Int pnnx_14640 1 1 W1.1 1792 aten::Int pnnx_14641 1 1 W1.1 1795 aten::Int pnnx_14642 1 1 W1.1 1798 aten::Int pnnx_14643 1 1 W1.1 1801 aten::Int pnnx_14644 1 1 W1.1 1804 aten::Int pnnx_14645 1 1 W1.1 1807 aten::Int pnnx_14646 1 1 W1.1 1810 aten::Int pnnx_14647 1 1 W1.1 1813 aten::Int pnnx_14648 1 1 W1.1 1816 aten::Int pnnx_14649 1 1 W1.1 1819 aten::Int pnnx_14650 1 1 W1.1 1822 aten::Int pnnx_14651 1 1 W1.1 1825 aten::Int pnnx_14652 1 1 W1.1 1828 aten::Int pnnx_14653 1 1 W1.1 1831 aten::Int pnnx_14654 1 1 W1.1 1834 aten::Int pnnx_14655 1 1 W1.1 1837 aten::Int pnnx_14656 1 1 W1.1 1840 aten::Int pnnx_14657 1 1 W1.1 1843 aten::Int pnnx_14658 1 1 W1.1 1846 aten::Int pnnx_14659 1 1 W1.1 1849 aten::Int pnnx_14660 1 1 W1.1 1852 aten::Int pnnx_14661 1 1 W1.1 1855 aten::Int pnnx_14662 1 1 W1.1 1858 aten::Int pnnx_14663 1 1 W1.1 1861 aten::Int pnnx_14664 1 1 W1.1 1864 aten::Int pnnx_14665 1 1 W1.1 1867 aten::Int pnnx_14666 1 1 W1.1 1870 aten::Int pnnx_14667 1 1 W1.1 1873 aten::Int pnnx_14668 1 1 W1.1 1876 aten::Int pnnx_14669 1 1 W1.1 1879 aten::Int pnnx_14670 1 1 W1.1 1882 aten::Int pnnx_14671 1 1 W1.1 1885 aten::Int pnnx_14672 1 1 W1.1 1888 aten::Int pnnx_14673 1 1 W1.1 1891 aten::Int pnnx_14674 1 1 W1.1 1894 aten::Int pnnx_14675 1 1 W1.1 1897 aten::Int pnnx_14676 1 1 W1.1 1900 aten::Int pnnx_14677 1 1 W1.1 1903 aten::Int pnnx_14678 1 1 W1.1 1906 aten::Int pnnx_14679 1 1 W1.1 1909 aten::Int pnnx_14680 1 1 W1.1 1912 aten::Int pnnx_14681 1 1 W1.1 1915 aten::Int pnnx_14682 1 1 W1.1 1918 aten::Int pnnx_14683 1 1 W1.1 1921 aten::Int pnnx_14684 1 1 W1.1 1924 aten::Int pnnx_14685 1 1 W1.1 1927 aten::Int pnnx_14686 1 1 W1.1 1930 aten::Int pnnx_14687 1 1 W1.1 1933 aten::Int pnnx_14688 1 1 W1.1 1936 aten::Int pnnx_14689 1 1 W1.1 1939 aten::Int pnnx_14690 1 1 W1.1 1942 aten::Int pnnx_14691 1 1 W1.1 1945 aten::Int pnnx_14692 1 1 W1.1 1948 aten::Int pnnx_14693 1 1 W1.1 1951 aten::Int pnnx_14694 1 1 W1.1 1954 aten::Int pnnx_14695 1 1 W1.1 1957 aten::Int pnnx_14696 1 1 W1.1 1960 aten::Int pnnx_14697 1 1 W1.1 1963 aten::Int pnnx_14698 1 1 W1.1 1966 aten::Int pnnx_14699 1 1 W1.1 1969 aten::Int pnnx_14700 1 1 W1.1 1972 aten::Int pnnx_14701 1 1 W1.1 1975 aten::Int pnnx_14702 1 1 W1.1 1978 aten::Int pnnx_14703 1 1 W1.1 1981 aten::Int pnnx_14704 1 1 W1.1 1984 aten::Int pnnx_14705 1 1 W1.1 1987 aten::Int pnnx_14706 1 1 W1.1 1990 aten::Int pnnx_14707 1 1 W1.1 1993 aten::Int pnnx_14708 1 1 W1.1 1996 aten::Int pnnx_14709 1 1 W1.1 1999 aten::Int pnnx_14710 1 1 W1.1 2002 aten::Int pnnx_14711 1 1 W1.1 2005 aten::Int pnnx_14712 1 1 W1.1 2008 aten::Int pnnx_14713 1 1 W1.1 2011 prim::Constant pnnx_14714 0 1 14095 
value=-1 prim::Constant pnnx_14715 0 1 14096 value=2 prim::Constant pnnx_14716 0 1 14097 value=1 prim::Constant pnnx_14718 0 1 24621 value=2 torch.flatten torch.flatten_2197 3 1 14094 14096 14095 14099 $input=14094 $start_dim=14096 $end_dim=14095 #14094=(1,192,48,48)f32 #14099=(1,192,2304)f32 torch.transpose torch.transpose_3132 3 1 14099 14097 24621 input.331 $input=14099 $dim0=14097 $dim1=24621 #14099=(1,192,2304)f32 #input.331=(1,2304,192)f32 nn.LayerNorm patch_embed_mmsa.norm 1 1 input.331 14101 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.331=(1,2304,192)f32 #14101=(1,2304,192)f32 nn.Dropout pos_drop_mmsa 1 1 14101 2016 #14101=(1,2304,192)f32 #2016=(1,2304,192)f32 prim::Constant pnnx_14720 0 1 14102 value=1 prim::Constant pnnx_14721 0 1 14119 value=trunc prim::Constant pnnx_14722 0 1 14120 value=8 prim::Constant pnnx_14723 0 1 14121 value=0 prim::Constant pnnx_14724 0 1 14122 value=2 prim::Constant pnnx_14725 0 1 14123 value=1 prim::Constant pnnx_14726 0 1 14124 value=3 prim::Constant pnnx_14727 0 1 14125 value=8 prim::Constant pnnx_14728 0 1 14126 value=4 prim::Constant pnnx_14729 0 1 14127 value=5 prim::Constant pnnx_14730 0 1 14128 value=-1 prim::Constant pnnx_14731 0 1 14129 value=64 aten::size pnnx_14732 2 1 2016 14121 14135 #2016=(1,2304,192)f32 prim::NumToTensor pnnx_14733 1 1 14135 B.177 aten::Int pnnx_14734 1 1 B.177 14137 aten::Int pnnx_14735 1 1 B.177 14138 aten::size pnnx_14736 2 1 2016 14122 14139 #2016=(1,2304,192)f32 prim::NumToTensor pnnx_14737 1 1 14139 C.299 aten::Int pnnx_14738 1 1 C.299 14141 aten::Int pnnx_14739 1 1 C.299 14142 aten::Int pnnx_14740 1 1 C.299 14143 aten::Int pnnx_14741 1 1 C.299 14144 nn.LayerNorm layers_mmsa.0.residual_group.blocks.0.norm1 1 1 2016 14145 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #2016=(1,2304,192)f32 #14145=(1,2304,192)f32 prim::ListConstruct pnnx_14742 4 1 14138 1771 2011 14144 14146 prim::Constant pnnx_14744 0 1 24622 value=0 Tensor.view Tensor.view_1780 2 1 14145 14146 x.147 $input=14145 $shape=14146 #14145=(1,2304,192)f32 #x.147=(1,48,48,192)f32 aten::size pnnx_14745 2 1 x.147 24622 14148 #x.147=(1,48,48,192)f32 prim::NumToTensor pnnx_14746 1 1 14148 B0.75 aten::Int pnnx_14747 1 1 B0.75 14150 aten::size pnnx_14748 2 1 x.147 14123 14151 #x.147=(1,48,48,192)f32 prim::NumToTensor pnnx_14749 1 1 14151 14152 prim::Constant pnnx_14750 0 1 24623 value=2 aten::size pnnx_14751 2 1 x.147 24623 14153 #x.147=(1,48,48,192)f32 prim::NumToTensor pnnx_14752 1 1 14153 14154 aten::size pnnx_14753 2 1 x.147 14124 14155 #x.147=(1,48,48,192)f32 prim::NumToTensor pnnx_14754 1 1 14155 C0.75 aten::Int pnnx_14755 1 1 C0.75 14157 aten::Int pnnx_14756 1 1 C0.75 14158 aten::div pnnx_14757 3 1 14152 14120 14119 14159 aten::Int pnnx_14758 1 1 14159 14160 prim::Constant pnnx_14759 0 1 24624 value=8 prim::Constant pnnx_14760 0 1 24625 value=trunc aten::div pnnx_14761 3 1 14154 24624 24625 14161 aten::Int pnnx_14762 1 1 14161 14162 prim::Constant pnnx_14763 0 1 24626 value=8 prim::ListConstruct pnnx_14764 6 1 14150 14160 14125 14162 24626 14158 14163 prim::Constant pnnx_14766 0 1 24627 value=0 prim::Constant pnnx_14767 0 1 24628 value=1 prim::Constant pnnx_14768 0 1 24629 value=3 prim::Constant pnnx_14769 0 1 24630 value=2 prim::ListConstruct pnnx_14770 6 1 24627 24628 24629 24630 14126 14127 14165 Tensor.view Tensor.view_1781 2 1 x.147 14163 x0.75 $input=x.147 $shape=14163 #x.147=(1,48,48,192)f32 #x0.75=(1,6,8,6,8,192)f32 prim::Constant pnnx_14774 
0 1 24632 value=8 prim::Constant pnnx_14775 0 1 24633 value=8 prim::ListConstruct pnnx_14776 4 1 14128 24632 24633 14157 14168 torch.permute torch.permute_2816 2 1 x0.75 14165 14166 $input=x0.75 $dims=14165 #x0.75=(1,6,8,6,8,192)f32 #14166=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_218 1 1 14166 14167 memory_format=torch.contiguous_format $input=14166 #14166=(1,6,6,8,8,192)f32 #14167=(1,6,6,8,8,192)f32 prim::Constant pnnx_14778 0 1 24634 value=-1 prim::ListConstruct pnnx_14779 3 1 24634 14129 14143 14170 prim::Constant pnnx_14781 0 1 14172 value=1.767767e-01 prim::Constant pnnx_14782 0 1 14173 value=trunc prim::Constant pnnx_14783 0 1 14174 value=6 prim::Constant pnnx_14784 0 1 14175 value=0 prim::Constant pnnx_14785 0 1 14176 value=1 prim::Constant pnnx_14786 0 1 14177 value=2 prim::Constant pnnx_14787 0 1 14178 value=3 prim::Constant pnnx_14788 0 1 14179 value=6 prim::Constant pnnx_14789 0 1 14180 value=4 prim::Constant pnnx_14790 0 1 14181 value=-2 prim::Constant pnnx_14791 0 1 14182 value=-1 prim::Constant pnnx_14792 0 1 14183 value=64 pnnx.Attribute layers_mmsa.0.residual_group.blocks.0.attn 0 1 relative_position_bias_table.147 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.147=(225,6)f32 pnnx.Attribute layers_mmsa.0.residual_group.blocks.0.attn 0 1 relative_position_index.147 @relative_position_index=(64,64)i64 #relative_position_index.147=(64,64)i64 Tensor.view Tensor.view_1782 2 1 14167 14168 x_windows.147 $input=14167 $shape=14168 #14167=(1,6,6,8,8,192)f32 #x_windows.147=(36,8,8,192)f32 Tensor.view Tensor.view_1783 2 1 x_windows.147 14170 x1.75 $input=x_windows.147 $shape=14170 #x_windows.147=(36,8,8,192)f32 #x1.75=(36,64,192)f32 aten::size pnnx_14793 2 1 x1.75 14175 14191 #x1.75=(36,64,192)f32 prim::NumToTensor pnnx_14794 1 1 14191 B_.147 aten::Int pnnx_14795 1 1 B_.147 14193 aten::Int pnnx_14796 1 1 B_.147 14194 aten::size pnnx_14797 2 1 x1.75 14176 14195 #x1.75=(36,64,192)f32 prim::NumToTensor pnnx_14798 1 1 14195 N.147 aten::Int pnnx_14799 1 1 N.147 14197 aten::Int pnnx_14800 1 1 N.147 14198 aten::size pnnx_14801 2 1 x1.75 14177 14199 #x1.75=(36,64,192)f32 prim::NumToTensor pnnx_14802 1 1 14199 C.301 aten::Int pnnx_14803 1 1 C.301 14201 nn.Linear layers_mmsa.0.residual_group.blocks.0.attn.qkv 1 1 x1.75 14202 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.75=(36,64,192)f32 #14202=(36,64,576)f32 aten::div pnnx_14804 3 1 C.301 14174 14173 14203 aten::Int pnnx_14805 1 1 14203 14204 prim::ListConstruct pnnx_14806 5 1 14194 14198 14178 14179 14204 14205 prim::Constant pnnx_14808 0 1 24635 value=2 prim::Constant pnnx_14809 0 1 24636 value=0 prim::Constant pnnx_14810 0 1 24637 value=3 prim::Constant pnnx_14811 0 1 24638 value=1 prim::ListConstruct pnnx_14812 5 1 24635 24636 24637 24638 14180 14207 Tensor.reshape Tensor.reshape_578 2 1 14202 14205 14206 $input=14202 $shape=14205 #14202=(36,64,576)f32 #14206=(36,64,3,6,32)f32 prim::Constant pnnx_14814 0 1 24639 value=0 prim::Constant pnnx_14815 0 1 24640 value=0 prim::Constant pnnx_14817 0 1 24641 value=0 prim::Constant pnnx_14818 0 1 24642 value=1 prim::Constant pnnx_14820 0 1 24643 value=0 prim::Constant pnnx_14821 0 1 24644 value=2 torch.permute torch.permute_2817 2 1 14206 14207 qkv0.75 $input=14206 $dims=14207 #14206=(36,64,3,6,32)f32 #qkv0.75=(3,36,6,64,32)f32 Tensor.select Tensor.select_866 3 1 qkv0.75 24639 24640 q.147 $input=qkv0.75 $dim=24639 $index=24640 #qkv0.75=(3,36,6,64,32)f32 #q.147=(36,6,64,32)f32 aten::mul pnnx_14823 2 1 q.147 14172 q0.75 
#q.147=(36,6,64,32)f32 #q0.75=(36,6,64,32)f32 Tensor.select Tensor.select_867 3 1 qkv0.75 24641 24642 k.147 $input=qkv0.75 $dim=24641 $index=24642 #qkv0.75=(3,36,6,64,32)f32 #k.147=(36,6,64,32)f32 prim::Constant pnnx_14826 0 1 24645 value=-1 prim::ListConstruct pnnx_14827 1 1 24645 14215 Tensor.view Tensor.view_1784 2 1 relative_position_index.147 14215 14216 $input=relative_position_index.147 $shape=14215 #relative_position_index.147=(64,64)i64 #14216=(4096)i64 prim::ListConstruct pnnx_14829 1 1 14216 14217 #14216=(4096)i64 prim::Constant pnnx_14831 0 1 24646 value=64 prim::Constant pnnx_14832 0 1 24647 value=-1 prim::ListConstruct pnnx_14833 3 1 14183 24646 24647 14219 Tensor.index Tensor.index_398 2 1 relative_position_bias_table.147 14217 14218 $input=relative_position_bias_table.147 $expr=14217 #relative_position_bias_table.147=(225,6)f32 #14218=(4096,6)f32 prim::Constant pnnx_14835 0 1 24648 value=2 prim::Constant pnnx_14836 0 1 24649 value=0 prim::Constant pnnx_14837 0 1 24650 value=1 prim::ListConstruct pnnx_14838 3 1 24648 24649 24650 14221 Tensor.view Tensor.view_1785 2 1 14218 14219 relative_position_bias.147 $input=14218 $shape=14219 #14218=(4096,6)f32 #relative_position_bias.147=(64,64,6)f32 prim::Constant pnnx_14842 0 1 24652 value=0 torch.permute torch.permute_2818 2 1 relative_position_bias.147 14221 14222 $input=relative_position_bias.147 $dims=14221 #relative_position_bias.147=(64,64,6)f32 #14222=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_219 1 1 14222 relative_position_bias0.75 memory_format=torch.contiguous_format $input=14222 #14222=(6,64,64)f32 #relative_position_bias0.75=(6,64,64)f32 prim::Constant pnnx_14844 0 1 24653 value=1 torch.transpose torch.transpose_3133 3 1 k.147 14181 14182 14213 $input=k.147 $dim0=14181 $dim1=14182 #k.147=(36,6,64,32)f32 #14213=(36,6,32,64)f32 torch.matmul torch.matmul_2348 2 1 q0.75 14213 attn.295 $input=q0.75 $other=14213 #q0.75=(36,6,64,32)f32 #14213=(36,6,32,64)f32 #attn.295=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3362 2 1 relative_position_bias0.75 24652 14224 $input=relative_position_bias0.75 $dim=24652 #relative_position_bias0.75=(6,64,64)f32 #14224=(1,6,64,64)f32 aten::add pnnx_14845 3 1 attn.295 14224 24653 input.333 #attn.295=(36,6,64,64)f32 #14224=(1,6,64,64)f32 #input.333=(36,6,64,64)f32 nn.Softmax layers_mmsa.0.residual_group.blocks.0.attn.softmax 1 1 input.333 14226 dim=-1 #input.333=(36,6,64,64)f32 #14226=(36,6,64,64)f32 nn.Dropout layers_mmsa.0.residual_group.blocks.0.attn.attn_drop 1 1 14226 14227 #14226=(36,6,64,64)f32 #14227=(36,6,64,64)f32 Tensor.select Tensor.select_868 3 1 qkv0.75 24643 24644 v.147 $input=qkv0.75 $dim=24643 $index=24644 #qkv0.75=(3,36,6,64,32)f32 #v.147=(36,6,64,32)f32 prim::Constant pnnx_14847 0 1 24654 value=1 prim::Constant pnnx_14848 0 1 24655 value=2 torch.matmul torch.matmul_2349 2 1 14227 v.147 14228 $input=14227 $other=v.147 #14227=(36,6,64,64)f32 #v.147=(36,6,64,32)f32 #14228=(36,6,64,32)f32 prim::ListConstruct pnnx_14850 3 1 14193 14197 14201 14230 torch.transpose torch.transpose_3134 3 1 14228 24654 24655 14229 $input=14228 $dim0=24654 $dim1=24655 #14228=(36,6,64,32)f32 #14229=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_579 2 1 14229 14230 input0.79 $input=14229 $shape=14230 #14229=(36,64,6,32)f32 #input0.79=(36,64,192)f32 nn.Linear layers_mmsa.0.residual_group.blocks.0.attn.proj 1 1 input0.79 14232 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.79=(36,64,192)f32 #14232=(36,64,192)f32 nn.Dropout 
layers_mmsa.0.residual_group.blocks.0.attn.proj_drop 1 1 14232 14233 #14232=(36,64,192)f32 #14233=(36,64,192)f32 prim::Constant pnnx_14852 0 1 24656 value=-1 prim::Constant pnnx_14853 0 1 24657 value=8 prim::Constant pnnx_14854 0 1 24658 value=8 prim::ListConstruct pnnx_14855 4 1 24656 24657 24658 14142 14234 prim::Constant pnnx_14857 0 1 24659 value=8 prim::Constant pnnx_14858 0 1 24660 value=trunc aten::div pnnx_14859 3 1 H1.1 24659 24660 14236 aten::Int pnnx_14860 1 1 14236 14237 prim::Constant pnnx_14861 0 1 24661 value=8 prim::Constant pnnx_14862 0 1 24662 value=trunc aten::div pnnx_14863 3 1 W1.1 24661 24662 14238 aten::Int pnnx_14864 1 1 14238 14239 prim::Constant pnnx_14865 0 1 24663 value=1 prim::Constant pnnx_14866 0 1 24664 value=8 prim::Constant pnnx_14867 0 1 24665 value=8 prim::Constant pnnx_14868 0 1 24666 value=-1 prim::ListConstruct pnnx_14869 6 1 24663 14237 14239 24664 24665 24666 14240 prim::Constant pnnx_14871 0 1 24667 value=0 prim::Constant pnnx_14872 0 1 24668 value=1 prim::Constant pnnx_14873 0 1 24669 value=3 prim::Constant pnnx_14874 0 1 24670 value=2 prim::Constant pnnx_14875 0 1 24671 value=4 prim::Constant pnnx_14876 0 1 24672 value=5 prim::ListConstruct pnnx_14877 6 1 24667 24668 24669 24670 24671 24672 14242 Tensor.view Tensor.view_1786 2 1 14233 14234 windows.147 $input=14233 $shape=14234 #14233=(36,64,192)f32 #windows.147=(36,8,8,192)f32 Tensor.view Tensor.view_1787 2 1 windows.147 14240 x2.75 $input=windows.147 $shape=14240 #windows.147=(36,8,8,192)f32 #x2.75=(1,6,6,8,8,192)f32 prim::Constant pnnx_14881 0 1 24674 value=1 prim::Constant pnnx_14882 0 1 24675 value=-1 prim::ListConstruct pnnx_14883 4 1 24674 1768 2008 24675 14245 torch.permute torch.permute_2819 2 1 x2.75 14242 14243 $input=x2.75 $dims=14242 #x2.75=(1,6,6,8,8,192)f32 #14243=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_220 1 1 14243 14244 memory_format=torch.contiguous_format $input=14243 #14243=(1,6,8,6,8,192)f32 #14244=(1,6,8,6,8,192)f32 aten::mul pnnx_14885 2 1 H1.1 W1.1 14247 aten::Int pnnx_14886 1 1 14247 14248 prim::ListConstruct pnnx_14887 3 1 14137 14248 14141 14249 prim::Constant pnnx_14889 0 1 14251 value=None prim::Constant pnnx_14890 0 1 24676 value=1 Tensor.view Tensor.view_1788 2 1 14244 14245 x3.75 $input=14244 $shape=14245 #14244=(1,6,8,6,8,192)f32 #x3.75=(1,48,48,192)f32 Tensor.view Tensor.view_1789 2 1 x3.75 14249 x4.75 $input=x3.75 $shape=14249 #x3.75=(1,48,48,192)f32 #x4.75=(1,2304,192)f32 aten::add pnnx_14891 3 1 2016 x4.75 24676 input.335 #2016=(1,2304,192)f32 #x4.75=(1,2304,192)f32 #input.335=(1,2304,192)f32 nn.LayerNorm layers_mmsa.0.residual_group.blocks.0.norm2 1 1 input.335 14253 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.335=(1,2304,192)f32 #14253=(1,2304,192)f32 nn.Linear layers_mmsa.0.residual_group.blocks.0.mlp.fc1 1 1 14253 14258 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #14253=(1,2304,192)f32 #14258=(1,2304,384)f32 nn.GELU layers_mmsa.0.residual_group.blocks.0.mlp.act 1 1 14258 14259 #14258=(1,2304,384)f32 #14259=(1,2304,384)f32 nn.Dropout layers_mmsa.0.residual_group.blocks.0.mlp.drop 1 1 14259 14260 #14259=(1,2304,384)f32 #14260=(1,2304,384)f32 nn.Linear layers_mmsa.0.residual_group.blocks.0.mlp.fc2 1 1 14260 14261 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #14260=(1,2304,384)f32 #14261=(1,2304,192)f32 nn.Dropout layers_mmsa.0.residual_group.blocks.0.mlp.drop 1 1 14261 14262 #14261=(1,2304,192)f32 
#14262=(1,2304,192)f32 prim::Constant pnnx_14892 0 1 14263 value=None prim::Constant pnnx_14893 0 1 24677 value=1 aten::add pnnx_14894 3 1 input.335 14262 24677 14264 #input.335=(1,2304,192)f32 #14262=(1,2304,192)f32 #14264=(1,2304,192)f32 prim::Constant pnnx_14895 0 1 14265 value=trunc prim::Constant pnnx_14896 0 1 14266 value=8 prim::Constant pnnx_14897 0 1 14267 value=0 prim::Constant pnnx_14898 0 1 14268 value=2 prim::Constant pnnx_14899 0 1 14269 value=-4 prim::Constant pnnx_14900 0 1 14270 value=1 prim::Constant pnnx_14901 0 1 14271 value=3 prim::Constant pnnx_14902 0 1 14272 value=8 prim::Constant pnnx_14903 0 1 14273 value=4 prim::Constant pnnx_14904 0 1 14274 value=5 prim::Constant pnnx_14905 0 1 14275 value=-1 prim::Constant pnnx_14906 0 1 14276 value=64 pnnx.Attribute layers_mmsa.0.residual_group.blocks.1 0 1 attn_mask.75 @attn_mask=(36,64,64)f32 #attn_mask.75=(36,64,64)f32 aten::size pnnx_14907 2 1 14264 14267 14283 #14264=(1,2304,192)f32 prim::NumToTensor pnnx_14908 1 1 14283 B.179 aten::Int pnnx_14909 1 1 B.179 14285 aten::Int pnnx_14910 1 1 B.179 14286 aten::size pnnx_14911 2 1 14264 14268 14287 #14264=(1,2304,192)f32 prim::NumToTensor pnnx_14912 1 1 14287 C.303 aten::Int pnnx_14913 1 1 C.303 14289 aten::Int pnnx_14914 1 1 C.303 14290 aten::Int pnnx_14915 1 1 C.303 14291 aten::Int pnnx_14916 1 1 C.303 14292 nn.LayerNorm layers_mmsa.0.residual_group.blocks.1.norm1 1 1 14264 14293 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #14264=(1,2304,192)f32 #14293=(1,2304,192)f32 prim::ListConstruct pnnx_14917 4 1 14286 1765 2005 14292 14294 prim::Constant pnnx_14919 0 1 24678 value=-4 prim::ListConstruct pnnx_14920 2 1 14269 24678 14296 prim::Constant pnnx_14921 0 1 24679 value=2 prim::ListConstruct pnnx_14922 2 1 14270 24679 14297 Tensor.view Tensor.view_1790 2 1 14293 14294 x.149 $input=14293 $shape=14294 #14293=(1,2304,192)f32 #x.149=(1,48,48,192)f32 prim::Constant pnnx_14924 0 1 24680 value=0 torch.roll torch.roll_2492 3 1 x.149 14296 14297 x0.77 $input=x.149 $shifts=14296 $dims=14297 #x.149=(1,48,48,192)f32 #x0.77=(1,48,48,192)f32 aten::size pnnx_14925 2 1 x0.77 24680 14299 #x0.77=(1,48,48,192)f32 prim::NumToTensor pnnx_14926 1 1 14299 B0.77 aten::Int pnnx_14927 1 1 B0.77 14301 prim::Constant pnnx_14928 0 1 24681 value=1 aten::size pnnx_14929 2 1 x0.77 24681 14302 #x0.77=(1,48,48,192)f32 prim::NumToTensor pnnx_14930 1 1 14302 14303 prim::Constant pnnx_14931 0 1 24682 value=2 aten::size pnnx_14932 2 1 x0.77 24682 14304 #x0.77=(1,48,48,192)f32 prim::NumToTensor pnnx_14933 1 1 14304 14305 aten::size pnnx_14934 2 1 x0.77 14271 14306 #x0.77=(1,48,48,192)f32 prim::NumToTensor pnnx_14935 1 1 14306 C0.77 aten::Int pnnx_14936 1 1 C0.77 14308 aten::Int pnnx_14937 1 1 C0.77 14309 aten::div pnnx_14938 3 1 14303 14266 14265 14310 aten::Int pnnx_14939 1 1 14310 14311 prim::Constant pnnx_14940 0 1 24683 value=8 prim::Constant pnnx_14941 0 1 24684 value=trunc aten::div pnnx_14942 3 1 14305 24683 24684 14312 aten::Int pnnx_14943 1 1 14312 14313 prim::Constant pnnx_14944 0 1 24685 value=8 prim::ListConstruct pnnx_14945 6 1 14301 14311 14272 14313 24685 14309 14314 prim::Constant pnnx_14947 0 1 24686 value=0 prim::Constant pnnx_14948 0 1 24687 value=1 prim::Constant pnnx_14949 0 1 24688 value=3 prim::Constant pnnx_14950 0 1 24689 value=2 prim::ListConstruct pnnx_14951 6 1 24686 24687 24688 24689 14273 14274 14316 Tensor.view Tensor.view_1791 2 1 x0.77 14314 x1.77 $input=x0.77 $shape=14314 #x0.77=(1,48,48,192)f32 #x1.77=(1,6,8,6,8,192)f32 
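Before the first layers_mmsa block, the feature map is flattened into tokens (torch.flatten_2197, torch.transpose_3132, patch_embed_mmsa.norm), and every block then partitions the (1,48,48,192) map into 8x8 windows via the view -> permute -> contiguous -> view chains seen above (e.g. Tensor.view_1781 -> torch.permute_2816 -> Tensor.view_1782/1783). A rough sketch of that partition step, assuming window_size=8 and using illustrative names not taken from the dump:

import torch

# 1) Token embedding at torch.flatten_2197 / torch.transpose_3132 / patch_embed_mmsa.norm:
#    (1,192,48,48) --flatten(2,-1)--> (1,192,2304) --transpose(1,2)--> (1,2304,192) --LayerNorm(192)

# 2) Window partition, i.e. the recurring view/permute(0,1,3,2,4,5)/contiguous/view chain:
def window_partition(x, window_size=8):
    # x: (B, H, W, C) -> (num_windows*B, window_size*window_size, C)
    B, H, W, C = x.shape
    x = x.view(B, H // window_size, window_size, W // window_size, window_size, C)
    x = x.permute(0, 1, 3, 2, 4, 5).contiguous()
    return x.view(-1, window_size * window_size, C)

x = torch.randn(1, 48, 48, 192)
print(window_partition(x).shape)   # torch.Size([36, 64, 192]), matching x1.75 above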
prim::Constant pnnx_14955 0 1 24691 value=8 prim::Constant pnnx_14956 0 1 24692 value=8 prim::ListConstruct pnnx_14957 4 1 14275 24691 24692 14308 14319 torch.permute torch.permute_2820 2 1 x1.77 14316 14317 $input=x1.77 $dims=14316 #x1.77=(1,6,8,6,8,192)f32 #14317=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_221 1 1 14317 14318 memory_format=torch.contiguous_format $input=14317 #14317=(1,6,6,8,8,192)f32 #14318=(1,6,6,8,8,192)f32 prim::Constant pnnx_14959 0 1 24693 value=-1 prim::ListConstruct pnnx_14960 3 1 24693 14276 14291 14321 prim::Constant pnnx_14962 0 1 14323 value=1.767767e-01 prim::Constant pnnx_14963 0 1 14324 value=trunc prim::Constant pnnx_14964 0 1 14325 value=6 prim::Constant pnnx_14965 0 1 14326 value=0 prim::Constant pnnx_14966 0 1 14327 value=1 prim::Constant pnnx_14967 0 1 14328 value=2 prim::Constant pnnx_14968 0 1 14329 value=3 prim::Constant pnnx_14969 0 1 14330 value=6 prim::Constant pnnx_14970 0 1 14331 value=4 prim::Constant pnnx_14971 0 1 14332 value=-2 prim::Constant pnnx_14972 0 1 14333 value=-1 prim::Constant pnnx_14973 0 1 14334 value=64 pnnx.Attribute layers_mmsa.0.residual_group.blocks.1.attn 0 1 relative_position_bias_table.149 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.149=(225,6)f32 pnnx.Attribute layers_mmsa.0.residual_group.blocks.1.attn 0 1 relative_position_index.149 @relative_position_index=(64,64)i64 #relative_position_index.149=(64,64)i64 Tensor.view Tensor.view_1792 2 1 14318 14319 x_windows.149 $input=14318 $shape=14319 #14318=(1,6,6,8,8,192)f32 #x_windows.149=(36,8,8,192)f32 Tensor.view Tensor.view_1793 2 1 x_windows.149 14321 x2.77 $input=x_windows.149 $shape=14321 #x_windows.149=(36,8,8,192)f32 #x2.77=(36,64,192)f32 aten::size pnnx_14974 2 1 x2.77 14326 14342 #x2.77=(36,64,192)f32 prim::NumToTensor pnnx_14975 1 1 14342 B_.149 aten::Int pnnx_14976 1 1 B_.149 14344 aten::Int pnnx_14977 1 1 B_.149 14345 aten::size pnnx_14978 2 1 x2.77 14327 14346 #x2.77=(36,64,192)f32 prim::NumToTensor pnnx_14979 1 1 14346 N.149 aten::Int pnnx_14980 1 1 N.149 14348 aten::Int pnnx_14981 1 1 N.149 14349 aten::Int pnnx_14982 1 1 N.149 14350 aten::Int pnnx_14983 1 1 N.149 14351 aten::Int pnnx_14984 1 1 N.149 14352 aten::Int pnnx_14985 1 1 N.149 14353 aten::size pnnx_14986 2 1 x2.77 14328 14354 #x2.77=(36,64,192)f32 prim::NumToTensor pnnx_14987 1 1 14354 C.305 aten::Int pnnx_14988 1 1 C.305 14356 nn.Linear layers_mmsa.0.residual_group.blocks.1.attn.qkv 1 1 x2.77 14357 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.77=(36,64,192)f32 #14357=(36,64,576)f32 aten::div pnnx_14989 3 1 C.305 14325 14324 14358 aten::Int pnnx_14990 1 1 14358 14359 prim::ListConstruct pnnx_14991 5 1 14345 14353 14329 14330 14359 14360 prim::Constant pnnx_14993 0 1 24694 value=2 prim::Constant pnnx_14994 0 1 24695 value=0 prim::Constant pnnx_14995 0 1 24696 value=3 prim::Constant pnnx_14996 0 1 24697 value=1 prim::ListConstruct pnnx_14997 5 1 24694 24695 24696 24697 14331 14362 Tensor.reshape Tensor.reshape_580 2 1 14357 14360 14361 $input=14357 $shape=14360 #14357=(36,64,576)f32 #14361=(36,64,3,6,32)f32 prim::Constant pnnx_14999 0 1 24698 value=0 prim::Constant pnnx_15000 0 1 24699 value=0 prim::Constant pnnx_15002 0 1 24700 value=0 prim::Constant pnnx_15003 0 1 24701 value=1 prim::Constant pnnx_15005 0 1 24702 value=0 prim::Constant pnnx_15006 0 1 24703 value=2 torch.permute torch.permute_2821 2 1 14361 14362 qkv0.77 $input=14361 $dims=14362 #14361=(36,64,3,6,32)f32 #qkv0.77=(3,36,6,64,32)f32 Tensor.select 
Tensor.select_869 3 1 qkv0.77 24698 24699 q.149 $input=qkv0.77 $dim=24698 $index=24699 #qkv0.77=(3,36,6,64,32)f32 #q.149=(36,6,64,32)f32 aten::mul pnnx_15008 2 1 q.149 14323 q0.77 #q.149=(36,6,64,32)f32 #q0.77=(36,6,64,32)f32 Tensor.select Tensor.select_870 3 1 qkv0.77 24700 24701 k.149 $input=qkv0.77 $dim=24700 $index=24701 #qkv0.77=(3,36,6,64,32)f32 #k.149=(36,6,64,32)f32 prim::Constant pnnx_15011 0 1 24704 value=-1 prim::ListConstruct pnnx_15012 1 1 24704 14370 Tensor.view Tensor.view_1794 2 1 relative_position_index.149 14370 14371 $input=relative_position_index.149 $shape=14370 #relative_position_index.149=(64,64)i64 #14371=(4096)i64 prim::ListConstruct pnnx_15014 1 1 14371 14372 #14371=(4096)i64 prim::Constant pnnx_15016 0 1 24705 value=64 prim::Constant pnnx_15017 0 1 24706 value=-1 prim::ListConstruct pnnx_15018 3 1 14334 24705 24706 14374 Tensor.index Tensor.index_399 2 1 relative_position_bias_table.149 14372 14373 $input=relative_position_bias_table.149 $expr=14372 #relative_position_bias_table.149=(225,6)f32 #14373=(4096,6)f32 prim::Constant pnnx_15020 0 1 24707 value=2 prim::Constant pnnx_15021 0 1 24708 value=0 prim::Constant pnnx_15022 0 1 24709 value=1 prim::ListConstruct pnnx_15023 3 1 24707 24708 24709 14376 Tensor.view Tensor.view_1795 2 1 14373 14374 relative_position_bias.149 $input=14373 $shape=14374 #14373=(4096,6)f32 #relative_position_bias.149=(64,64,6)f32 prim::Constant pnnx_15027 0 1 24711 value=0 torch.permute torch.permute_2822 2 1 relative_position_bias.149 14376 14377 $input=relative_position_bias.149 $dims=14376 #relative_position_bias.149=(64,64,6)f32 #14377=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_222 1 1 14377 relative_position_bias0.77 memory_format=torch.contiguous_format $input=14377 #14377=(6,64,64)f32 #relative_position_bias0.77=(6,64,64)f32 prim::Constant pnnx_15029 0 1 24712 value=1 torch.transpose torch.transpose_3135 3 1 k.149 14332 14333 14368 $input=k.149 $dim0=14332 $dim1=14333 #k.149=(36,6,64,32)f32 #14368=(36,6,32,64)f32 torch.matmul torch.matmul_2350 2 1 q0.77 14368 attn.299 $input=q0.77 $other=14368 #q0.77=(36,6,64,32)f32 #14368=(36,6,32,64)f32 #attn.299=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3363 2 1 relative_position_bias0.77 24711 14379 $input=relative_position_bias0.77 $dim=24711 #relative_position_bias0.77=(6,64,64)f32 #14379=(1,6,64,64)f32 aten::add pnnx_15030 3 1 attn.299 14379 24712 attn0.39 #attn.299=(36,6,64,64)f32 #14379=(1,6,64,64)f32 #attn0.39=(36,6,64,64)f32 prim::Constant pnnx_15031 0 1 24713 value=0 aten::size pnnx_15032 2 1 attn_mask.75 24713 14381 #attn_mask.75=(36,64,64)f32 prim::NumToTensor pnnx_15033 1 1 14381 other.75 aten::Int pnnx_15034 1 1 other.75 14383 prim::Constant pnnx_15035 0 1 24714 value=trunc aten::div pnnx_15036 3 1 B_.149 other.75 24714 14384 aten::Int pnnx_15037 1 1 14384 14385 prim::Constant pnnx_15038 0 1 24715 value=6 prim::ListConstruct pnnx_15039 5 1 14385 14383 24715 14352 14351 14386 prim::Constant pnnx_15041 0 1 24716 value=1 prim::Constant pnnx_15043 0 1 24717 value=0 prim::Constant pnnx_15045 0 1 24718 value=1 Tensor.view Tensor.view_1796 2 1 attn0.39 14386 14387 $input=attn0.39 $shape=14386 #attn0.39=(36,6,64,64)f32 #14387=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3364 2 1 attn_mask.75 24716 14388 $input=attn_mask.75 $dim=24716 #attn_mask.75=(36,64,64)f32 #14388=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3365 2 1 14388 24717 14389 $input=14388 $dim=24717 #14388=(36,1,64,64)f32 #14389=(1,36,1,64,64)f32 aten::add pnnx_15046 3 1 14387 14389 24718 attn1.39 
#14387=(1,36,6,64,64)f32 #14389=(1,36,1,64,64)f32 #attn1.39=(1,36,6,64,64)f32 prim::Constant pnnx_15047 0 1 24719 value=-1 prim::Constant pnnx_15048 0 1 24720 value=6 prim::ListConstruct pnnx_15049 4 1 24719 24720 14350 14349 14391 Tensor.view Tensor.view_1797 2 1 attn1.39 14391 input.337 $input=attn1.39 $shape=14391 #attn1.39=(1,36,6,64,64)f32 #input.337=(36,6,64,64)f32 nn.Softmax layers_mmsa.0.residual_group.blocks.1.attn.softmax 1 1 input.337 14393 dim=-1 #input.337=(36,6,64,64)f32 #14393=(36,6,64,64)f32 nn.Dropout layers_mmsa.0.residual_group.blocks.1.attn.attn_drop 1 1 14393 14394 #14393=(36,6,64,64)f32 #14394=(36,6,64,64)f32 Tensor.select Tensor.select_871 3 1 qkv0.77 24702 24703 v.149 $input=qkv0.77 $dim=24702 $index=24703 #qkv0.77=(3,36,6,64,32)f32 #v.149=(36,6,64,32)f32 prim::Constant pnnx_15052 0 1 24721 value=1 prim::Constant pnnx_15053 0 1 24722 value=2 torch.matmul torch.matmul_2351 2 1 14394 v.149 14395 $input=14394 $other=v.149 #14394=(36,6,64,64)f32 #v.149=(36,6,64,32)f32 #14395=(36,6,64,32)f32 prim::ListConstruct pnnx_15055 3 1 14344 14348 14356 14397 torch.transpose torch.transpose_3136 3 1 14395 24721 24722 14396 $input=14395 $dim0=24721 $dim1=24722 #14395=(36,6,64,32)f32 #14396=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_581 2 1 14396 14397 input0.81 $input=14396 $shape=14397 #14396=(36,64,6,32)f32 #input0.81=(36,64,192)f32 nn.Linear layers_mmsa.0.residual_group.blocks.1.attn.proj 1 1 input0.81 14399 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.81=(36,64,192)f32 #14399=(36,64,192)f32 nn.Dropout layers_mmsa.0.residual_group.blocks.1.attn.proj_drop 1 1 14399 14400 #14399=(36,64,192)f32 #14400=(36,64,192)f32 prim::Constant pnnx_15057 0 1 24723 value=-1 prim::Constant pnnx_15058 0 1 24724 value=8 prim::Constant pnnx_15059 0 1 24725 value=8 prim::ListConstruct pnnx_15060 4 1 24723 24724 24725 14290 14401 prim::Constant pnnx_15062 0 1 24726 value=8 prim::Constant pnnx_15063 0 1 24727 value=trunc aten::div pnnx_15064 3 1 H1.1 24726 24727 14403 aten::Int pnnx_15065 1 1 14403 14404 prim::Constant pnnx_15066 0 1 24728 value=8 prim::Constant pnnx_15067 0 1 24729 value=trunc aten::div pnnx_15068 3 1 W1.1 24728 24729 14405 aten::Int pnnx_15069 1 1 14405 14406 prim::Constant pnnx_15070 0 1 24730 value=1 prim::Constant pnnx_15071 0 1 24731 value=8 prim::Constant pnnx_15072 0 1 24732 value=8 prim::Constant pnnx_15073 0 1 24733 value=-1 prim::ListConstruct pnnx_15074 6 1 24730 14404 14406 24731 24732 24733 14407 prim::Constant pnnx_15076 0 1 24734 value=0 prim::Constant pnnx_15077 0 1 24735 value=1 prim::Constant pnnx_15078 0 1 24736 value=3 prim::Constant pnnx_15079 0 1 24737 value=2 prim::Constant pnnx_15080 0 1 24738 value=4 prim::Constant pnnx_15081 0 1 24739 value=5 prim::ListConstruct pnnx_15082 6 1 24734 24735 24736 24737 24738 24739 14409 Tensor.view Tensor.view_1798 2 1 14400 14401 windows.149 $input=14400 $shape=14401 #14400=(36,64,192)f32 #windows.149=(36,8,8,192)f32 Tensor.view Tensor.view_1799 2 1 windows.149 14407 x3.77 $input=windows.149 $shape=14407 #windows.149=(36,8,8,192)f32 #x3.77=(1,6,6,8,8,192)f32 prim::Constant pnnx_15086 0 1 24741 value=1 prim::Constant pnnx_15087 0 1 24742 value=-1 prim::ListConstruct pnnx_15088 4 1 24741 1762 2002 24742 14412 torch.permute torch.permute_2823 2 1 x3.77 14409 14410 $input=x3.77 $dims=14409 #x3.77=(1,6,6,8,8,192)f32 #14410=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_223 1 1 14410 14411 memory_format=torch.contiguous_format $input=14410 #14410=(1,6,8,6,8,192)f32 
#14411=(1,6,8,6,8,192)f32 prim::Constant pnnx_15090 0 1 24743 value=4 prim::Constant pnnx_15091 0 1 24744 value=4 prim::ListConstruct pnnx_15092 2 1 24743 24744 14414 prim::Constant pnnx_15093 0 1 24745 value=1 prim::Constant pnnx_15094 0 1 24746 value=2 prim::ListConstruct pnnx_15095 2 1 24745 24746 14415 Tensor.view Tensor.view_1800 2 1 14411 14412 shifted_x.75 $input=14411 $shape=14412 #14411=(1,6,8,6,8,192)f32 #shifted_x.75=(1,48,48,192)f32 aten::mul pnnx_15097 2 1 H1.1 W1.1 14417 aten::Int pnnx_15098 1 1 14417 14418 prim::ListConstruct pnnx_15099 3 1 14285 14418 14289 14419 prim::Constant pnnx_15101 0 1 14421 value=None prim::Constant pnnx_15102 0 1 24747 value=1 torch.roll torch.roll_2493 3 1 shifted_x.75 14414 14415 x4.77 $input=shifted_x.75 $shifts=14414 $dims=14415 #shifted_x.75=(1,48,48,192)f32 #x4.77=(1,48,48,192)f32 Tensor.view Tensor.view_1801 2 1 x4.77 14419 x5.75 $input=x4.77 $shape=14419 #x4.77=(1,48,48,192)f32 #x5.75=(1,2304,192)f32 aten::add pnnx_15103 3 1 14264 x5.75 24747 input.339 #14264=(1,2304,192)f32 #x5.75=(1,2304,192)f32 #input.339=(1,2304,192)f32 nn.LayerNorm layers_mmsa.0.residual_group.blocks.1.norm2 1 1 input.339 14423 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.339=(1,2304,192)f32 #14423=(1,2304,192)f32 nn.Linear layers_mmsa.0.residual_group.blocks.1.mlp.fc1 1 1 14423 14428 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #14423=(1,2304,192)f32 #14428=(1,2304,384)f32 nn.GELU layers_mmsa.0.residual_group.blocks.1.mlp.act 1 1 14428 14429 #14428=(1,2304,384)f32 #14429=(1,2304,384)f32 nn.Dropout layers_mmsa.0.residual_group.blocks.1.mlp.drop 1 1 14429 14430 #14429=(1,2304,384)f32 #14430=(1,2304,384)f32 nn.Linear layers_mmsa.0.residual_group.blocks.1.mlp.fc2 1 1 14430 14431 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #14430=(1,2304,384)f32 #14431=(1,2304,192)f32 nn.Dropout layers_mmsa.0.residual_group.blocks.1.mlp.drop 1 1 14431 14432 #14431=(1,2304,192)f32 #14432=(1,2304,192)f32 prim::Constant pnnx_15104 0 1 14433 value=None prim::Constant pnnx_15105 0 1 24748 value=1 aten::add pnnx_15106 3 1 input.339 14432 24748 14434 #input.339=(1,2304,192)f32 #14432=(1,2304,192)f32 #14434=(1,2304,192)f32 prim::Constant pnnx_15107 0 1 14435 value=trunc prim::Constant pnnx_15108 0 1 14436 value=8 prim::Constant pnnx_15109 0 1 14437 value=0 prim::Constant pnnx_15110 0 1 14438 value=2 prim::Constant pnnx_15111 0 1 14439 value=1 prim::Constant pnnx_15112 0 1 14440 value=3 prim::Constant pnnx_15113 0 1 14441 value=8 prim::Constant pnnx_15114 0 1 14442 value=4 prim::Constant pnnx_15115 0 1 14443 value=5 prim::Constant pnnx_15116 0 1 14444 value=-1 prim::Constant pnnx_15117 0 1 14445 value=64 aten::size pnnx_15118 2 1 14434 14437 14451 #14434=(1,2304,192)f32 prim::NumToTensor pnnx_15119 1 1 14451 B.181 aten::Int pnnx_15120 1 1 B.181 14453 aten::Int pnnx_15121 1 1 B.181 14454 aten::size pnnx_15122 2 1 14434 14438 14455 #14434=(1,2304,192)f32 prim::NumToTensor pnnx_15123 1 1 14455 C.307 aten::Int pnnx_15124 1 1 C.307 14457 aten::Int pnnx_15125 1 1 C.307 14458 aten::Int pnnx_15126 1 1 C.307 14459 aten::Int pnnx_15127 1 1 C.307 14460 nn.LayerNorm layers_mmsa.0.residual_group.blocks.2.norm1 1 1 14434 14461 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #14434=(1,2304,192)f32 #14461=(1,2304,192)f32 prim::ListConstruct pnnx_15128 4 1 14454 1759 1999 14460 14462 prim::Constant pnnx_15130 0 1 24749 value=0 Tensor.view 
Tensor.view_1802 2 1 14461 14462 x.151 $input=14461 $shape=14462 #14461=(1,2304,192)f32 #x.151=(1,48,48,192)f32 aten::size pnnx_15131 2 1 x.151 24749 14464 #x.151=(1,48,48,192)f32 prim::NumToTensor pnnx_15132 1 1 14464 B0.79 aten::Int pnnx_15133 1 1 B0.79 14466 aten::size pnnx_15134 2 1 x.151 14439 14467 #x.151=(1,48,48,192)f32 prim::NumToTensor pnnx_15135 1 1 14467 14468 prim::Constant pnnx_15136 0 1 24750 value=2 aten::size pnnx_15137 2 1 x.151 24750 14469 #x.151=(1,48,48,192)f32 prim::NumToTensor pnnx_15138 1 1 14469 14470 aten::size pnnx_15139 2 1 x.151 14440 14471 #x.151=(1,48,48,192)f32 prim::NumToTensor pnnx_15140 1 1 14471 C0.79 aten::Int pnnx_15141 1 1 C0.79 14473 aten::Int pnnx_15142 1 1 C0.79 14474 aten::div pnnx_15143 3 1 14468 14436 14435 14475 aten::Int pnnx_15144 1 1 14475 14476 prim::Constant pnnx_15145 0 1 24751 value=8 prim::Constant pnnx_15146 0 1 24752 value=trunc aten::div pnnx_15147 3 1 14470 24751 24752 14477 aten::Int pnnx_15148 1 1 14477 14478 prim::Constant pnnx_15149 0 1 24753 value=8 prim::ListConstruct pnnx_15150 6 1 14466 14476 14441 14478 24753 14474 14479 prim::Constant pnnx_15152 0 1 24754 value=0 prim::Constant pnnx_15153 0 1 24755 value=1 prim::Constant pnnx_15154 0 1 24756 value=3 prim::Constant pnnx_15155 0 1 24757 value=2 prim::ListConstruct pnnx_15156 6 1 24754 24755 24756 24757 14442 14443 14481 Tensor.view Tensor.view_1803 2 1 x.151 14479 x0.79 $input=x.151 $shape=14479 #x.151=(1,48,48,192)f32 #x0.79=(1,6,8,6,8,192)f32 prim::Constant pnnx_15160 0 1 24759 value=8 prim::Constant pnnx_15161 0 1 24760 value=8 prim::ListConstruct pnnx_15162 4 1 14444 24759 24760 14473 14484 torch.permute torch.permute_2824 2 1 x0.79 14481 14482 $input=x0.79 $dims=14481 #x0.79=(1,6,8,6,8,192)f32 #14482=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_224 1 1 14482 14483 memory_format=torch.contiguous_format $input=14482 #14482=(1,6,6,8,8,192)f32 #14483=(1,6,6,8,8,192)f32 prim::Constant pnnx_15164 0 1 24761 value=-1 prim::ListConstruct pnnx_15165 3 1 24761 14445 14459 14486 prim::Constant pnnx_15167 0 1 14488 value=1.767767e-01 prim::Constant pnnx_15168 0 1 14489 value=trunc prim::Constant pnnx_15169 0 1 14490 value=6 prim::Constant pnnx_15170 0 1 14491 value=0 prim::Constant pnnx_15171 0 1 14492 value=1 prim::Constant pnnx_15172 0 1 14493 value=2 prim::Constant pnnx_15173 0 1 14494 value=3 prim::Constant pnnx_15174 0 1 14495 value=6 prim::Constant pnnx_15175 0 1 14496 value=4 prim::Constant pnnx_15176 0 1 14497 value=-2 prim::Constant pnnx_15177 0 1 14498 value=-1 prim::Constant pnnx_15178 0 1 14499 value=64 pnnx.Attribute layers_mmsa.0.residual_group.blocks.2.attn 0 1 relative_position_bias_table.151 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.151=(225,6)f32 pnnx.Attribute layers_mmsa.0.residual_group.blocks.2.attn 0 1 relative_position_index.151 @relative_position_index=(64,64)i64 #relative_position_index.151=(64,64)i64 Tensor.view Tensor.view_1804 2 1 14483 14484 x_windows.151 $input=14483 $shape=14484 #14483=(1,6,6,8,8,192)f32 #x_windows.151=(36,8,8,192)f32 Tensor.view Tensor.view_1805 2 1 x_windows.151 14486 x1.79 $input=x_windows.151 $shape=14486 #x_windows.151=(36,8,8,192)f32 #x1.79=(36,64,192)f32 aten::size pnnx_15179 2 1 x1.79 14491 14507 #x1.79=(36,64,192)f32 prim::NumToTensor pnnx_15180 1 1 14507 B_.151 aten::Int pnnx_15181 1 1 B_.151 14509 aten::Int pnnx_15182 1 1 B_.151 14510 aten::size pnnx_15183 2 1 x1.79 14492 14511 #x1.79=(36,64,192)f32 prim::NumToTensor pnnx_15184 1 1 14511 N.151 aten::Int pnnx_15185 1 1 N.151 14513 
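Each layers_mmsa.*.attn span above is a standard windowed multi-head self-attention: a 192->576 qkv projection split into 6 heads of 32 dims (the constant 1.767767e-01 is 32**-0.5), a relative-position-bias lookup from the (225,6) table via the (64,64) index, softmax, and a 192->192 output projection. A hedged sketch of that computation (attn_drop / proj_drop omitted, names illustrative):

import torch

def window_attention(x, qkv, proj, bias_table, bias_index, num_heads=6):
    B_, N, C = x.shape                                   # (36, 64, 192)
    head_dim = C // num_heads                            # 32
    q, k, v = qkv(x).reshape(B_, N, 3, num_heads, head_dim) \
                    .permute(2, 0, 3, 1, 4)              # (3, 36, 6, 64, 32)
    attn = (q * head_dim ** -0.5) @ k.transpose(-2, -1)  # (36, 6, 64, 64)
    rpb = bias_table[bias_index.view(-1)].view(N, N, -1)             # (64, 64, 6)
    attn = attn + rpb.permute(2, 0, 1).contiguous().unsqueeze(0)     # + (1, 6, 64, 64)
    attn = attn.softmax(dim=-1)
    out = (attn @ v).transpose(1, 2).reshape(B_, N, C)   # (36, 64, 192)
    return proj(out)

# Illustrative usage with random parameters in place of the dumped weights:
qkv = torch.nn.Linear(192, 576); proj = torch.nn.Linear(192, 192)
table = torch.randn(225, 6); index = torch.randint(0, 225, (64, 64))
y = window_attention(torch.randn(36, 64, 192), qkv, proj, table, index)  # (36, 64, 192)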
aten::Int pnnx_15186 1 1 N.151 14514 aten::size pnnx_15187 2 1 x1.79 14493 14515 #x1.79=(36,64,192)f32 prim::NumToTensor pnnx_15188 1 1 14515 C.309 aten::Int pnnx_15189 1 1 C.309 14517 nn.Linear layers_mmsa.0.residual_group.blocks.2.attn.qkv 1 1 x1.79 14518 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.79=(36,64,192)f32 #14518=(36,64,576)f32 aten::div pnnx_15190 3 1 C.309 14490 14489 14519 aten::Int pnnx_15191 1 1 14519 14520 prim::ListConstruct pnnx_15192 5 1 14510 14514 14494 14495 14520 14521 prim::Constant pnnx_15194 0 1 24762 value=2 prim::Constant pnnx_15195 0 1 24763 value=0 prim::Constant pnnx_15196 0 1 24764 value=3 prim::Constant pnnx_15197 0 1 24765 value=1 prim::ListConstruct pnnx_15198 5 1 24762 24763 24764 24765 14496 14523 Tensor.reshape Tensor.reshape_582 2 1 14518 14521 14522 $input=14518 $shape=14521 #14518=(36,64,576)f32 #14522=(36,64,3,6,32)f32 prim::Constant pnnx_15200 0 1 24766 value=0 prim::Constant pnnx_15201 0 1 24767 value=0 prim::Constant pnnx_15203 0 1 24768 value=0 prim::Constant pnnx_15204 0 1 24769 value=1 prim::Constant pnnx_15206 0 1 24770 value=0 prim::Constant pnnx_15207 0 1 24771 value=2 torch.permute torch.permute_2825 2 1 14522 14523 qkv0.79 $input=14522 $dims=14523 #14522=(36,64,3,6,32)f32 #qkv0.79=(3,36,6,64,32)f32 Tensor.select Tensor.select_872 3 1 qkv0.79 24766 24767 q.151 $input=qkv0.79 $dim=24766 $index=24767 #qkv0.79=(3,36,6,64,32)f32 #q.151=(36,6,64,32)f32 aten::mul pnnx_15209 2 1 q.151 14488 q0.79 #q.151=(36,6,64,32)f32 #q0.79=(36,6,64,32)f32 Tensor.select Tensor.select_873 3 1 qkv0.79 24768 24769 k.151 $input=qkv0.79 $dim=24768 $index=24769 #qkv0.79=(3,36,6,64,32)f32 #k.151=(36,6,64,32)f32 prim::Constant pnnx_15212 0 1 24772 value=-1 prim::ListConstruct pnnx_15213 1 1 24772 14531 Tensor.view Tensor.view_1806 2 1 relative_position_index.151 14531 14532 $input=relative_position_index.151 $shape=14531 #relative_position_index.151=(64,64)i64 #14532=(4096)i64 prim::ListConstruct pnnx_15215 1 1 14532 14533 #14532=(4096)i64 prim::Constant pnnx_15217 0 1 24773 value=64 prim::Constant pnnx_15218 0 1 24774 value=-1 prim::ListConstruct pnnx_15219 3 1 14499 24773 24774 14535 Tensor.index Tensor.index_400 2 1 relative_position_bias_table.151 14533 14534 $input=relative_position_bias_table.151 $expr=14533 #relative_position_bias_table.151=(225,6)f32 #14534=(4096,6)f32 prim::Constant pnnx_15221 0 1 24775 value=2 prim::Constant pnnx_15222 0 1 24776 value=0 prim::Constant pnnx_15223 0 1 24777 value=1 prim::ListConstruct pnnx_15224 3 1 24775 24776 24777 14537 Tensor.view Tensor.view_1807 2 1 14534 14535 relative_position_bias.151 $input=14534 $shape=14535 #14534=(4096,6)f32 #relative_position_bias.151=(64,64,6)f32 prim::Constant pnnx_15228 0 1 24779 value=0 torch.permute torch.permute_2826 2 1 relative_position_bias.151 14537 14538 $input=relative_position_bias.151 $dims=14537 #relative_position_bias.151=(64,64,6)f32 #14538=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_225 1 1 14538 relative_position_bias0.79 memory_format=torch.contiguous_format $input=14538 #14538=(6,64,64)f32 #relative_position_bias0.79=(6,64,64)f32 prim::Constant pnnx_15230 0 1 24780 value=1 torch.transpose torch.transpose_3137 3 1 k.151 14497 14498 14529 $input=k.151 $dim0=14497 $dim1=14498 #k.151=(36,6,64,32)f32 #14529=(36,6,32,64)f32 torch.matmul torch.matmul_2352 2 1 q0.79 14529 attn.303 $input=q0.79 $other=14529 #q0.79=(36,6,64,32)f32 #14529=(36,6,32,64)f32 #attn.303=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3366 2 1 
relative_position_bias0.79 24779 14540 $input=relative_position_bias0.79 $dim=24779 #relative_position_bias0.79=(6,64,64)f32 #14540=(1,6,64,64)f32 aten::add pnnx_15231 3 1 attn.303 14540 24780 input.341 #attn.303=(36,6,64,64)f32 #14540=(1,6,64,64)f32 #input.341=(36,6,64,64)f32 nn.Softmax layers_mmsa.0.residual_group.blocks.2.attn.softmax 1 1 input.341 14542 dim=-1 #input.341=(36,6,64,64)f32 #14542=(36,6,64,64)f32 nn.Dropout layers_mmsa.0.residual_group.blocks.2.attn.attn_drop 1 1 14542 14543 #14542=(36,6,64,64)f32 #14543=(36,6,64,64)f32 Tensor.select Tensor.select_874 3 1 qkv0.79 24770 24771 v.151 $input=qkv0.79 $dim=24770 $index=24771 #qkv0.79=(3,36,6,64,32)f32 #v.151=(36,6,64,32)f32 prim::Constant pnnx_15233 0 1 24781 value=1 prim::Constant pnnx_15234 0 1 24782 value=2 torch.matmul torch.matmul_2353 2 1 14543 v.151 14544 $input=14543 $other=v.151 #14543=(36,6,64,64)f32 #v.151=(36,6,64,32)f32 #14544=(36,6,64,32)f32 prim::ListConstruct pnnx_15236 3 1 14509 14513 14517 14546 torch.transpose torch.transpose_3138 3 1 14544 24781 24782 14545 $input=14544 $dim0=24781 $dim1=24782 #14544=(36,6,64,32)f32 #14545=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_583 2 1 14545 14546 input0.83 $input=14545 $shape=14546 #14545=(36,64,6,32)f32 #input0.83=(36,64,192)f32 nn.Linear layers_mmsa.0.residual_group.blocks.2.attn.proj 1 1 input0.83 14548 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.83=(36,64,192)f32 #14548=(36,64,192)f32 nn.Dropout layers_mmsa.0.residual_group.blocks.2.attn.proj_drop 1 1 14548 14549 #14548=(36,64,192)f32 #14549=(36,64,192)f32 prim::Constant pnnx_15238 0 1 24783 value=-1 prim::Constant pnnx_15239 0 1 24784 value=8 prim::Constant pnnx_15240 0 1 24785 value=8 prim::ListConstruct pnnx_15241 4 1 24783 24784 24785 14458 14550 prim::Constant pnnx_15243 0 1 24786 value=8 prim::Constant pnnx_15244 0 1 24787 value=trunc aten::div pnnx_15245 3 1 H1.1 24786 24787 14552 aten::Int pnnx_15246 1 1 14552 14553 prim::Constant pnnx_15247 0 1 24788 value=8 prim::Constant pnnx_15248 0 1 24789 value=trunc aten::div pnnx_15249 3 1 W1.1 24788 24789 14554 aten::Int pnnx_15250 1 1 14554 14555 prim::Constant pnnx_15251 0 1 24790 value=1 prim::Constant pnnx_15252 0 1 24791 value=8 prim::Constant pnnx_15253 0 1 24792 value=8 prim::Constant pnnx_15254 0 1 24793 value=-1 prim::ListConstruct pnnx_15255 6 1 24790 14553 14555 24791 24792 24793 14556 prim::Constant pnnx_15257 0 1 24794 value=0 prim::Constant pnnx_15258 0 1 24795 value=1 prim::Constant pnnx_15259 0 1 24796 value=3 prim::Constant pnnx_15260 0 1 24797 value=2 prim::Constant pnnx_15261 0 1 24798 value=4 prim::Constant pnnx_15262 0 1 24799 value=5 prim::ListConstruct pnnx_15263 6 1 24794 24795 24796 24797 24798 24799 14558 Tensor.view Tensor.view_1808 2 1 14549 14550 windows.151 $input=14549 $shape=14550 #14549=(36,64,192)f32 #windows.151=(36,8,8,192)f32 Tensor.view Tensor.view_1809 2 1 windows.151 14556 x2.79 $input=windows.151 $shape=14556 #windows.151=(36,8,8,192)f32 #x2.79=(1,6,6,8,8,192)f32 prim::Constant pnnx_15267 0 1 24801 value=1 prim::Constant pnnx_15268 0 1 24802 value=-1 prim::ListConstruct pnnx_15269 4 1 24801 1756 1996 24802 14561 torch.permute torch.permute_2827 2 1 x2.79 14558 14559 $input=x2.79 $dims=14558 #x2.79=(1,6,6,8,8,192)f32 #14559=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_226 1 1 14559 14560 memory_format=torch.contiguous_format $input=14559 #14559=(1,6,8,6,8,192)f32 #14560=(1,6,8,6,8,192)f32 aten::mul pnnx_15271 2 1 H1.1 W1.1 14563 aten::Int pnnx_15272 1 1 14563 14564 
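The mirror chain above (e.g. Tensor.view_1808 -> torch.permute_2827 -> Tensor.view_1810/1811) merges the (36,8,8,192) windows back into the (1,48,48,192) map, and the odd-numbered (shifted) blocks additionally roll the map by half a window and add the precomputed attn_mask, as in the torch.roll_2492 / torch.roll_2493 pair. A rough sketch under those assumptions, with illustrative names:

import torch

def window_reverse(windows, window_size, H, W):
    # windows: (num_windows*B, ws, ws, C) -> (B, H, W, C), e.g. (36,8,8,192) -> (1,48,48,192)
    B = windows.shape[0] // ((H // window_size) * (W // window_size))
    x = windows.view(B, H // window_size, W // window_size, window_size, window_size, -1)
    return x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, H, W, -1)

# Shifted blocks (cf. torch.roll_2492 / torch.roll_2493 and attn_mask.75/.77 above):
#   x = torch.roll(x, shifts=(-4, -4), dims=(1, 2))                  # before partition
#   attn = (attn.view(-1, nW, 6, 64, 64)
#           + attn_mask.unsqueeze(1).unsqueeze(0)).view(-1, 6, 64, 64)
#   x = torch.roll(x, shifts=(4, 4), dims=(1, 2))                    # after window_reverse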
prim::ListConstruct pnnx_15273 3 1 14453 14564 14457 14565 prim::Constant pnnx_15275 0 1 14567 value=None prim::Constant pnnx_15276 0 1 24803 value=1 Tensor.view Tensor.view_1810 2 1 14560 14561 x3.79 $input=14560 $shape=14561 #14560=(1,6,8,6,8,192)f32 #x3.79=(1,48,48,192)f32 Tensor.view Tensor.view_1811 2 1 x3.79 14565 x4.79 $input=x3.79 $shape=14565 #x3.79=(1,48,48,192)f32 #x4.79=(1,2304,192)f32 aten::add pnnx_15277 3 1 14434 x4.79 24803 input.343 #14434=(1,2304,192)f32 #x4.79=(1,2304,192)f32 #input.343=(1,2304,192)f32 nn.LayerNorm layers_mmsa.0.residual_group.blocks.2.norm2 1 1 input.343 14569 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.343=(1,2304,192)f32 #14569=(1,2304,192)f32 nn.Linear layers_mmsa.0.residual_group.blocks.2.mlp.fc1 1 1 14569 14574 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #14569=(1,2304,192)f32 #14574=(1,2304,384)f32 nn.GELU layers_mmsa.0.residual_group.blocks.2.mlp.act 1 1 14574 14575 #14574=(1,2304,384)f32 #14575=(1,2304,384)f32 nn.Dropout layers_mmsa.0.residual_group.blocks.2.mlp.drop 1 1 14575 14576 #14575=(1,2304,384)f32 #14576=(1,2304,384)f32 nn.Linear layers_mmsa.0.residual_group.blocks.2.mlp.fc2 1 1 14576 14577 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #14576=(1,2304,384)f32 #14577=(1,2304,192)f32 nn.Dropout layers_mmsa.0.residual_group.blocks.2.mlp.drop 1 1 14577 14578 #14577=(1,2304,192)f32 #14578=(1,2304,192)f32 prim::Constant pnnx_15278 0 1 14579 value=None prim::Constant pnnx_15279 0 1 24804 value=1 aten::add pnnx_15280 3 1 input.343 14578 24804 14580 #input.343=(1,2304,192)f32 #14578=(1,2304,192)f32 #14580=(1,2304,192)f32 prim::Constant pnnx_15281 0 1 14581 value=trunc prim::Constant pnnx_15282 0 1 14582 value=8 prim::Constant pnnx_15283 0 1 14583 value=0 prim::Constant pnnx_15284 0 1 14584 value=2 prim::Constant pnnx_15285 0 1 14585 value=-4 prim::Constant pnnx_15286 0 1 14586 value=1 prim::Constant pnnx_15287 0 1 14587 value=3 prim::Constant pnnx_15288 0 1 14588 value=8 prim::Constant pnnx_15289 0 1 14589 value=4 prim::Constant pnnx_15290 0 1 14590 value=5 prim::Constant pnnx_15291 0 1 14591 value=-1 prim::Constant pnnx_15292 0 1 14592 value=64 pnnx.Attribute layers_mmsa.0.residual_group.blocks.3 0 1 attn_mask.77 @attn_mask=(36,64,64)f32 #attn_mask.77=(36,64,64)f32 aten::size pnnx_15293 2 1 14580 14583 14599 #14580=(1,2304,192)f32 prim::NumToTensor pnnx_15294 1 1 14599 B.183 aten::Int pnnx_15295 1 1 B.183 14601 aten::Int pnnx_15296 1 1 B.183 14602 aten::size pnnx_15297 2 1 14580 14584 14603 #14580=(1,2304,192)f32 prim::NumToTensor pnnx_15298 1 1 14603 C.311 aten::Int pnnx_15299 1 1 C.311 14605 aten::Int pnnx_15300 1 1 C.311 14606 aten::Int pnnx_15301 1 1 C.311 14607 aten::Int pnnx_15302 1 1 C.311 14608 nn.LayerNorm layers_mmsa.0.residual_group.blocks.3.norm1 1 1 14580 14609 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #14580=(1,2304,192)f32 #14609=(1,2304,192)f32 prim::ListConstruct pnnx_15303 4 1 14602 1753 1993 14608 14610 prim::Constant pnnx_15305 0 1 24805 value=-4 prim::ListConstruct pnnx_15306 2 1 14585 24805 14612 prim::Constant pnnx_15307 0 1 24806 value=2 prim::ListConstruct pnnx_15308 2 1 14586 24806 14613 Tensor.view Tensor.view_1812 2 1 14609 14610 x.153 $input=14609 $shape=14610 #14609=(1,2304,192)f32 #x.153=(1,48,48,192)f32 prim::Constant pnnx_15310 0 1 24807 value=0 torch.roll torch.roll_2494 3 1 x.153 14612 14613 x0.81 $input=x.153 $shifts=14612 
$dims=14613 #x.153=(1,48,48,192)f32 #x0.81=(1,48,48,192)f32 aten::size pnnx_15311 2 1 x0.81 24807 14615 #x0.81=(1,48,48,192)f32 prim::NumToTensor pnnx_15312 1 1 14615 B0.81 aten::Int pnnx_15313 1 1 B0.81 14617 prim::Constant pnnx_15314 0 1 24808 value=1 aten::size pnnx_15315 2 1 x0.81 24808 14618 #x0.81=(1,48,48,192)f32 prim::NumToTensor pnnx_15316 1 1 14618 14619 prim::Constant pnnx_15317 0 1 24809 value=2 aten::size pnnx_15318 2 1 x0.81 24809 14620 #x0.81=(1,48,48,192)f32 prim::NumToTensor pnnx_15319 1 1 14620 14621 aten::size pnnx_15320 2 1 x0.81 14587 14622 #x0.81=(1,48,48,192)f32 prim::NumToTensor pnnx_15321 1 1 14622 C0.81 aten::Int pnnx_15322 1 1 C0.81 14624 aten::Int pnnx_15323 1 1 C0.81 14625 aten::div pnnx_15324 3 1 14619 14582 14581 14626 aten::Int pnnx_15325 1 1 14626 14627 prim::Constant pnnx_15326 0 1 24810 value=8 prim::Constant pnnx_15327 0 1 24811 value=trunc aten::div pnnx_15328 3 1 14621 24810 24811 14628 aten::Int pnnx_15329 1 1 14628 14629 prim::Constant pnnx_15330 0 1 24812 value=8 prim::ListConstruct pnnx_15331 6 1 14617 14627 14588 14629 24812 14625 14630 prim::Constant pnnx_15333 0 1 24813 value=0 prim::Constant pnnx_15334 0 1 24814 value=1 prim::Constant pnnx_15335 0 1 24815 value=3 prim::Constant pnnx_15336 0 1 24816 value=2 prim::ListConstruct pnnx_15337 6 1 24813 24814 24815 24816 14589 14590 14632 Tensor.view Tensor.view_1813 2 1 x0.81 14630 x1.81 $input=x0.81 $shape=14630 #x0.81=(1,48,48,192)f32 #x1.81=(1,6,8,6,8,192)f32 prim::Constant pnnx_15341 0 1 24818 value=8 prim::Constant pnnx_15342 0 1 24819 value=8 prim::ListConstruct pnnx_15343 4 1 14591 24818 24819 14624 14635 torch.permute torch.permute_2828 2 1 x1.81 14632 14633 $input=x1.81 $dims=14632 #x1.81=(1,6,8,6,8,192)f32 #14633=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_227 1 1 14633 14634 memory_format=torch.contiguous_format $input=14633 #14633=(1,6,6,8,8,192)f32 #14634=(1,6,6,8,8,192)f32 prim::Constant pnnx_15345 0 1 24820 value=-1 prim::ListConstruct pnnx_15346 3 1 24820 14592 14607 14637 prim::Constant pnnx_15348 0 1 14639 value=1.767767e-01 prim::Constant pnnx_15349 0 1 14640 value=trunc prim::Constant pnnx_15350 0 1 14641 value=6 prim::Constant pnnx_15351 0 1 14642 value=0 prim::Constant pnnx_15352 0 1 14643 value=1 prim::Constant pnnx_15353 0 1 14644 value=2 prim::Constant pnnx_15354 0 1 14645 value=3 prim::Constant pnnx_15355 0 1 14646 value=6 prim::Constant pnnx_15356 0 1 14647 value=4 prim::Constant pnnx_15357 0 1 14648 value=-2 prim::Constant pnnx_15358 0 1 14649 value=-1 prim::Constant pnnx_15359 0 1 14650 value=64 pnnx.Attribute layers_mmsa.0.residual_group.blocks.3.attn 0 1 relative_position_bias_table.153 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.153=(225,6)f32 pnnx.Attribute layers_mmsa.0.residual_group.blocks.3.attn 0 1 relative_position_index.153 @relative_position_index=(64,64)i64 #relative_position_index.153=(64,64)i64 Tensor.view Tensor.view_1814 2 1 14634 14635 x_windows.153 $input=14634 $shape=14635 #14634=(1,6,6,8,8,192)f32 #x_windows.153=(36,8,8,192)f32 Tensor.view Tensor.view_1815 2 1 x_windows.153 14637 x2.81 $input=x_windows.153 $shape=14637 #x_windows.153=(36,8,8,192)f32 #x2.81=(36,64,192)f32 aten::size pnnx_15360 2 1 x2.81 14642 14658 #x2.81=(36,64,192)f32 prim::NumToTensor pnnx_15361 1 1 14658 B_.153 aten::Int pnnx_15362 1 1 B_.153 14660 aten::Int pnnx_15363 1 1 B_.153 14661 aten::size pnnx_15364 2 1 x2.81 14643 14662 #x2.81=(36,64,192)f32 prim::NumToTensor pnnx_15365 1 1 14662 N.153 aten::Int pnnx_15366 1 1 N.153 14664 aten::Int 
pnnx_15367 1 1 N.153 14665 aten::Int pnnx_15368 1 1 N.153 14666 aten::Int pnnx_15369 1 1 N.153 14667 aten::Int pnnx_15370 1 1 N.153 14668 aten::Int pnnx_15371 1 1 N.153 14669 aten::size pnnx_15372 2 1 x2.81 14644 14670 #x2.81=(36,64,192)f32 prim::NumToTensor pnnx_15373 1 1 14670 C.313 aten::Int pnnx_15374 1 1 C.313 14672 nn.Linear layers_mmsa.0.residual_group.blocks.3.attn.qkv 1 1 x2.81 14673 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.81=(36,64,192)f32 #14673=(36,64,576)f32 aten::div pnnx_15375 3 1 C.313 14641 14640 14674 aten::Int pnnx_15376 1 1 14674 14675 prim::ListConstruct pnnx_15377 5 1 14661 14669 14645 14646 14675 14676 prim::Constant pnnx_15379 0 1 24821 value=2 prim::Constant pnnx_15380 0 1 24822 value=0 prim::Constant pnnx_15381 0 1 24823 value=3 prim::Constant pnnx_15382 0 1 24824 value=1 prim::ListConstruct pnnx_15383 5 1 24821 24822 24823 24824 14647 14678 Tensor.reshape Tensor.reshape_584 2 1 14673 14676 14677 $input=14673 $shape=14676 #14673=(36,64,576)f32 #14677=(36,64,3,6,32)f32 prim::Constant pnnx_15385 0 1 24825 value=0 prim::Constant pnnx_15386 0 1 24826 value=0 prim::Constant pnnx_15388 0 1 24827 value=0 prim::Constant pnnx_15389 0 1 24828 value=1 prim::Constant pnnx_15391 0 1 24829 value=0 prim::Constant pnnx_15392 0 1 24830 value=2 torch.permute torch.permute_2829 2 1 14677 14678 qkv0.81 $input=14677 $dims=14678 #14677=(36,64,3,6,32)f32 #qkv0.81=(3,36,6,64,32)f32 Tensor.select Tensor.select_875 3 1 qkv0.81 24825 24826 q.153 $input=qkv0.81 $dim=24825 $index=24826 #qkv0.81=(3,36,6,64,32)f32 #q.153=(36,6,64,32)f32 aten::mul pnnx_15394 2 1 q.153 14639 q0.81 #q.153=(36,6,64,32)f32 #q0.81=(36,6,64,32)f32 Tensor.select Tensor.select_876 3 1 qkv0.81 24827 24828 k.153 $input=qkv0.81 $dim=24827 $index=24828 #qkv0.81=(3,36,6,64,32)f32 #k.153=(36,6,64,32)f32 prim::Constant pnnx_15397 0 1 24831 value=-1 prim::ListConstruct pnnx_15398 1 1 24831 14686 Tensor.view Tensor.view_1816 2 1 relative_position_index.153 14686 14687 $input=relative_position_index.153 $shape=14686 #relative_position_index.153=(64,64)i64 #14687=(4096)i64 prim::ListConstruct pnnx_15400 1 1 14687 14688 #14687=(4096)i64 prim::Constant pnnx_15402 0 1 24832 value=64 prim::Constant pnnx_15403 0 1 24833 value=-1 prim::ListConstruct pnnx_15404 3 1 14650 24832 24833 14690 Tensor.index Tensor.index_401 2 1 relative_position_bias_table.153 14688 14689 $input=relative_position_bias_table.153 $expr=14688 #relative_position_bias_table.153=(225,6)f32 #14689=(4096,6)f32 prim::Constant pnnx_15406 0 1 24834 value=2 prim::Constant pnnx_15407 0 1 24835 value=0 prim::Constant pnnx_15408 0 1 24836 value=1 prim::ListConstruct pnnx_15409 3 1 24834 24835 24836 14692 Tensor.view Tensor.view_1817 2 1 14689 14690 relative_position_bias.153 $input=14689 $shape=14690 #14689=(4096,6)f32 #relative_position_bias.153=(64,64,6)f32 prim::Constant pnnx_15413 0 1 24838 value=0 torch.permute torch.permute_2830 2 1 relative_position_bias.153 14692 14693 $input=relative_position_bias.153 $dims=14692 #relative_position_bias.153=(64,64,6)f32 #14693=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_228 1 1 14693 relative_position_bias0.81 memory_format=torch.contiguous_format $input=14693 #14693=(6,64,64)f32 #relative_position_bias0.81=(6,64,64)f32 prim::Constant pnnx_15415 0 1 24839 value=1 torch.transpose torch.transpose_3139 3 1 k.153 14648 14649 14684 $input=k.153 $dim0=14648 $dim1=14649 #k.153=(36,6,64,32)f32 #14684=(36,6,32,64)f32 torch.matmul torch.matmul_2354 2 1 q0.81 14684 attn.307 $input=q0.81 
$other=14684 #q0.81=(36,6,64,32)f32 #14684=(36,6,32,64)f32 #attn.307=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3367 2 1 relative_position_bias0.81 24838 14695 $input=relative_position_bias0.81 $dim=24838 #relative_position_bias0.81=(6,64,64)f32 #14695=(1,6,64,64)f32 aten::add pnnx_15416 3 1 attn.307 14695 24839 attn0.41 #attn.307=(36,6,64,64)f32 #14695=(1,6,64,64)f32 #attn0.41=(36,6,64,64)f32 prim::Constant pnnx_15417 0 1 24840 value=0 aten::size pnnx_15418 2 1 attn_mask.77 24840 14697 #attn_mask.77=(36,64,64)f32 prim::NumToTensor pnnx_15419 1 1 14697 other.77 aten::Int pnnx_15420 1 1 other.77 14699 prim::Constant pnnx_15421 0 1 24841 value=trunc aten::div pnnx_15422 3 1 B_.153 other.77 24841 14700 aten::Int pnnx_15423 1 1 14700 14701 prim::Constant pnnx_15424 0 1 24842 value=6 prim::ListConstruct pnnx_15425 5 1 14701 14699 24842 14668 14667 14702 prim::Constant pnnx_15427 0 1 24843 value=1 prim::Constant pnnx_15429 0 1 24844 value=0 prim::Constant pnnx_15431 0 1 24845 value=1 Tensor.view Tensor.view_1818 2 1 attn0.41 14702 14703 $input=attn0.41 $shape=14702 #attn0.41=(36,6,64,64)f32 #14703=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3368 2 1 attn_mask.77 24843 14704 $input=attn_mask.77 $dim=24843 #attn_mask.77=(36,64,64)f32 #14704=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3369 2 1 14704 24844 14705 $input=14704 $dim=24844 #14704=(36,1,64,64)f32 #14705=(1,36,1,64,64)f32 aten::add pnnx_15432 3 1 14703 14705 24845 attn1.41 #14703=(1,36,6,64,64)f32 #14705=(1,36,1,64,64)f32 #attn1.41=(1,36,6,64,64)f32 prim::Constant pnnx_15433 0 1 24846 value=-1 prim::Constant pnnx_15434 0 1 24847 value=6 prim::ListConstruct pnnx_15435 4 1 24846 24847 14666 14665 14707 Tensor.view Tensor.view_1819 2 1 attn1.41 14707 input.345 $input=attn1.41 $shape=14707 #attn1.41=(1,36,6,64,64)f32 #input.345=(36,6,64,64)f32 nn.Softmax layers_mmsa.0.residual_group.blocks.3.attn.softmax 1 1 input.345 14709 dim=-1 #input.345=(36,6,64,64)f32 #14709=(36,6,64,64)f32 nn.Dropout layers_mmsa.0.residual_group.blocks.3.attn.attn_drop 1 1 14709 14710 #14709=(36,6,64,64)f32 #14710=(36,6,64,64)f32 Tensor.select Tensor.select_877 3 1 qkv0.81 24829 24830 v.153 $input=qkv0.81 $dim=24829 $index=24830 #qkv0.81=(3,36,6,64,32)f32 #v.153=(36,6,64,32)f32 prim::Constant pnnx_15438 0 1 24848 value=1 prim::Constant pnnx_15439 0 1 24849 value=2 torch.matmul torch.matmul_2355 2 1 14710 v.153 14711 $input=14710 $other=v.153 #14710=(36,6,64,64)f32 #v.153=(36,6,64,32)f32 #14711=(36,6,64,32)f32 prim::ListConstruct pnnx_15441 3 1 14660 14664 14672 14713 torch.transpose torch.transpose_3140 3 1 14711 24848 24849 14712 $input=14711 $dim0=24848 $dim1=24849 #14711=(36,6,64,32)f32 #14712=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_585 2 1 14712 14713 input0.85 $input=14712 $shape=14713 #14712=(36,64,6,32)f32 #input0.85=(36,64,192)f32 nn.Linear layers_mmsa.0.residual_group.blocks.3.attn.proj 1 1 input0.85 14715 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.85=(36,64,192)f32 #14715=(36,64,192)f32 nn.Dropout layers_mmsa.0.residual_group.blocks.3.attn.proj_drop 1 1 14715 14716 #14715=(36,64,192)f32 #14716=(36,64,192)f32 prim::Constant pnnx_15443 0 1 24850 value=-1 prim::Constant pnnx_15444 0 1 24851 value=8 prim::Constant pnnx_15445 0 1 24852 value=8 prim::ListConstruct pnnx_15446 4 1 24850 24851 24852 14606 14717 prim::Constant pnnx_15448 0 1 24853 value=8 prim::Constant pnnx_15449 0 1 24854 value=trunc aten::div pnnx_15450 3 1 H1.1 24853 24854 14719 aten::Int pnnx_15451 1 1 14719 14720 prim::Constant 
pnnx_15452 0 1 24855 value=8 prim::Constant pnnx_15453 0 1 24856 value=trunc aten::div pnnx_15454 3 1 W1.1 24855 24856 14721 aten::Int pnnx_15455 1 1 14721 14722 prim::Constant pnnx_15456 0 1 24857 value=1 prim::Constant pnnx_15457 0 1 24858 value=8 prim::Constant pnnx_15458 0 1 24859 value=8 prim::Constant pnnx_15459 0 1 24860 value=-1 prim::ListConstruct pnnx_15460 6 1 24857 14720 14722 24858 24859 24860 14723 prim::Constant pnnx_15462 0 1 24861 value=0 prim::Constant pnnx_15463 0 1 24862 value=1 prim::Constant pnnx_15464 0 1 24863 value=3 prim::Constant pnnx_15465 0 1 24864 value=2 prim::Constant pnnx_15466 0 1 24865 value=4 prim::Constant pnnx_15467 0 1 24866 value=5 prim::ListConstruct pnnx_15468 6 1 24861 24862 24863 24864 24865 24866 14725 Tensor.view Tensor.view_1820 2 1 14716 14717 windows.153 $input=14716 $shape=14717 #14716=(36,64,192)f32 #windows.153=(36,8,8,192)f32 Tensor.view Tensor.view_1821 2 1 windows.153 14723 x3.81 $input=windows.153 $shape=14723 #windows.153=(36,8,8,192)f32 #x3.81=(1,6,6,8,8,192)f32 prim::Constant pnnx_15472 0 1 24868 value=1 prim::Constant pnnx_15473 0 1 24869 value=-1 prim::ListConstruct pnnx_15474 4 1 24868 1750 1990 24869 14728 torch.permute torch.permute_2831 2 1 x3.81 14725 14726 $input=x3.81 $dims=14725 #x3.81=(1,6,6,8,8,192)f32 #14726=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_229 1 1 14726 14727 memory_format=torch.contiguous_format $input=14726 #14726=(1,6,8,6,8,192)f32 #14727=(1,6,8,6,8,192)f32 prim::Constant pnnx_15476 0 1 24870 value=4 prim::Constant pnnx_15477 0 1 24871 value=4 prim::ListConstruct pnnx_15478 2 1 24870 24871 14730 prim::Constant pnnx_15479 0 1 24872 value=1 prim::Constant pnnx_15480 0 1 24873 value=2 prim::ListConstruct pnnx_15481 2 1 24872 24873 14731 Tensor.view Tensor.view_1822 2 1 14727 14728 shifted_x.77 $input=14727 $shape=14728 #14727=(1,6,8,6,8,192)f32 #shifted_x.77=(1,48,48,192)f32 aten::mul pnnx_15483 2 1 H1.1 W1.1 14733 aten::Int pnnx_15484 1 1 14733 14734 prim::ListConstruct pnnx_15485 3 1 14601 14734 14605 14735 prim::Constant pnnx_15487 0 1 14737 value=None prim::Constant pnnx_15488 0 1 24874 value=1 torch.roll torch.roll_2495 3 1 shifted_x.77 14730 14731 x4.81 $input=shifted_x.77 $shifts=14730 $dims=14731 #shifted_x.77=(1,48,48,192)f32 #x4.81=(1,48,48,192)f32 Tensor.view Tensor.view_1823 2 1 x4.81 14735 x5.77 $input=x4.81 $shape=14735 #x4.81=(1,48,48,192)f32 #x5.77=(1,2304,192)f32 aten::add pnnx_15489 3 1 14580 x5.77 24874 input.347 #14580=(1,2304,192)f32 #x5.77=(1,2304,192)f32 #input.347=(1,2304,192)f32 nn.LayerNorm layers_mmsa.0.residual_group.blocks.3.norm2 1 1 input.347 14739 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.347=(1,2304,192)f32 #14739=(1,2304,192)f32 nn.Linear layers_mmsa.0.residual_group.blocks.3.mlp.fc1 1 1 14739 14744 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #14739=(1,2304,192)f32 #14744=(1,2304,384)f32 nn.GELU layers_mmsa.0.residual_group.blocks.3.mlp.act 1 1 14744 14745 #14744=(1,2304,384)f32 #14745=(1,2304,384)f32 nn.Dropout layers_mmsa.0.residual_group.blocks.3.mlp.drop 1 1 14745 14746 #14745=(1,2304,384)f32 #14746=(1,2304,384)f32 nn.Linear layers_mmsa.0.residual_group.blocks.3.mlp.fc2 1 1 14746 14747 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #14746=(1,2304,384)f32 #14747=(1,2304,192)f32 nn.Dropout layers_mmsa.0.residual_group.blocks.3.mlp.drop 1 1 14747 14748 #14747=(1,2304,192)f32 #14748=(1,2304,192)f32 prim::Constant pnnx_15490 0 1 14749 
value=None prim::Constant pnnx_15491 0 1 24875 value=1 aten::add pnnx_15492 3 1 input.347 14748 24875 14750 #input.347=(1,2304,192)f32 #14748=(1,2304,192)f32 #14750=(1,2304,192)f32 prim::Constant pnnx_15493 0 1 14751 value=trunc prim::Constant pnnx_15494 0 1 14752 value=8 prim::Constant pnnx_15495 0 1 14753 value=0 prim::Constant pnnx_15496 0 1 14754 value=2 prim::Constant pnnx_15497 0 1 14755 value=1 prim::Constant pnnx_15498 0 1 14756 value=3 prim::Constant pnnx_15499 0 1 14757 value=8 prim::Constant pnnx_15500 0 1 14758 value=4 prim::Constant pnnx_15501 0 1 14759 value=5 prim::Constant pnnx_15502 0 1 14760 value=-1 prim::Constant pnnx_15503 0 1 14761 value=64 aten::size pnnx_15504 2 1 14750 14753 14767 #14750=(1,2304,192)f32 prim::NumToTensor pnnx_15505 1 1 14767 B.185 aten::Int pnnx_15506 1 1 B.185 14769 aten::Int pnnx_15507 1 1 B.185 14770 aten::size pnnx_15508 2 1 14750 14754 14771 #14750=(1,2304,192)f32 prim::NumToTensor pnnx_15509 1 1 14771 C.315 aten::Int pnnx_15510 1 1 C.315 14773 aten::Int pnnx_15511 1 1 C.315 14774 aten::Int pnnx_15512 1 1 C.315 14775 aten::Int pnnx_15513 1 1 C.315 14776 nn.LayerNorm layers_mmsa.0.residual_group.blocks.4.norm1 1 1 14750 14777 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #14750=(1,2304,192)f32 #14777=(1,2304,192)f32 prim::ListConstruct pnnx_15514 4 1 14770 1747 1987 14776 14778 prim::Constant pnnx_15516 0 1 24876 value=0 Tensor.view Tensor.view_1824 2 1 14777 14778 x.155 $input=14777 $shape=14778 #14777=(1,2304,192)f32 #x.155=(1,48,48,192)f32 aten::size pnnx_15517 2 1 x.155 24876 14780 #x.155=(1,48,48,192)f32 prim::NumToTensor pnnx_15518 1 1 14780 B0.83 aten::Int pnnx_15519 1 1 B0.83 14782 aten::size pnnx_15520 2 1 x.155 14755 14783 #x.155=(1,48,48,192)f32 prim::NumToTensor pnnx_15521 1 1 14783 14784 prim::Constant pnnx_15522 0 1 24877 value=2 aten::size pnnx_15523 2 1 x.155 24877 14785 #x.155=(1,48,48,192)f32 prim::NumToTensor pnnx_15524 1 1 14785 14786 aten::size pnnx_15525 2 1 x.155 14756 14787 #x.155=(1,48,48,192)f32 prim::NumToTensor pnnx_15526 1 1 14787 C0.83 aten::Int pnnx_15527 1 1 C0.83 14789 aten::Int pnnx_15528 1 1 C0.83 14790 aten::div pnnx_15529 3 1 14784 14752 14751 14791 aten::Int pnnx_15530 1 1 14791 14792 prim::Constant pnnx_15531 0 1 24878 value=8 prim::Constant pnnx_15532 0 1 24879 value=trunc aten::div pnnx_15533 3 1 14786 24878 24879 14793 aten::Int pnnx_15534 1 1 14793 14794 prim::Constant pnnx_15535 0 1 24880 value=8 prim::ListConstruct pnnx_15536 6 1 14782 14792 14757 14794 24880 14790 14795 prim::Constant pnnx_15538 0 1 24881 value=0 prim::Constant pnnx_15539 0 1 24882 value=1 prim::Constant pnnx_15540 0 1 24883 value=3 prim::Constant pnnx_15541 0 1 24884 value=2 prim::ListConstruct pnnx_15542 6 1 24881 24882 24883 24884 14758 14759 14797 Tensor.view Tensor.view_1825 2 1 x.155 14795 x0.83 $input=x.155 $shape=14795 #x.155=(1,48,48,192)f32 #x0.83=(1,6,8,6,8,192)f32 prim::Constant pnnx_15546 0 1 24886 value=8 prim::Constant pnnx_15547 0 1 24887 value=8 prim::ListConstruct pnnx_15548 4 1 14760 24886 24887 14789 14800 torch.permute torch.permute_2832 2 1 x0.83 14797 14798 $input=x0.83 $dims=14797 #x0.83=(1,6,8,6,8,192)f32 #14798=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_230 1 1 14798 14799 memory_format=torch.contiguous_format $input=14798 #14798=(1,6,6,8,8,192)f32 #14799=(1,6,6,8,8,192)f32 prim::Constant pnnx_15550 0 1 24888 value=-1 prim::ListConstruct pnnx_15551 3 1 24888 14761 14775 14802 prim::Constant pnnx_15553 0 1 14804 value=1.767767e-01 prim::Constant 
pnnx_15554 0 1 14805 value=trunc prim::Constant pnnx_15555 0 1 14806 value=6 prim::Constant pnnx_15556 0 1 14807 value=0 prim::Constant pnnx_15557 0 1 14808 value=1 prim::Constant pnnx_15558 0 1 14809 value=2 prim::Constant pnnx_15559 0 1 14810 value=3 prim::Constant pnnx_15560 0 1 14811 value=6 prim::Constant pnnx_15561 0 1 14812 value=4 prim::Constant pnnx_15562 0 1 14813 value=-2 prim::Constant pnnx_15563 0 1 14814 value=-1 prim::Constant pnnx_15564 0 1 14815 value=64 pnnx.Attribute layers_mmsa.0.residual_group.blocks.4.attn 0 1 relative_position_bias_table.155 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.155=(225,6)f32 pnnx.Attribute layers_mmsa.0.residual_group.blocks.4.attn 0 1 relative_position_index.155 @relative_position_index=(64,64)i64 #relative_position_index.155=(64,64)i64 Tensor.view Tensor.view_1826 2 1 14799 14800 x_windows.155 $input=14799 $shape=14800 #14799=(1,6,6,8,8,192)f32 #x_windows.155=(36,8,8,192)f32 Tensor.view Tensor.view_1827 2 1 x_windows.155 14802 x1.83 $input=x_windows.155 $shape=14802 #x_windows.155=(36,8,8,192)f32 #x1.83=(36,64,192)f32 aten::size pnnx_15565 2 1 x1.83 14807 14823 #x1.83=(36,64,192)f32 prim::NumToTensor pnnx_15566 1 1 14823 B_.155 aten::Int pnnx_15567 1 1 B_.155 14825 aten::Int pnnx_15568 1 1 B_.155 14826 aten::size pnnx_15569 2 1 x1.83 14808 14827 #x1.83=(36,64,192)f32 prim::NumToTensor pnnx_15570 1 1 14827 N.155 aten::Int pnnx_15571 1 1 N.155 14829 aten::Int pnnx_15572 1 1 N.155 14830 aten::size pnnx_15573 2 1 x1.83 14809 14831 #x1.83=(36,64,192)f32 prim::NumToTensor pnnx_15574 1 1 14831 C.317 aten::Int pnnx_15575 1 1 C.317 14833 nn.Linear layers_mmsa.0.residual_group.blocks.4.attn.qkv 1 1 x1.83 14834 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.83=(36,64,192)f32 #14834=(36,64,576)f32 aten::div pnnx_15576 3 1 C.317 14806 14805 14835 aten::Int pnnx_15577 1 1 14835 14836 prim::ListConstruct pnnx_15578 5 1 14826 14830 14810 14811 14836 14837 prim::Constant pnnx_15580 0 1 24889 value=2 prim::Constant pnnx_15581 0 1 24890 value=0 prim::Constant pnnx_15582 0 1 24891 value=3 prim::Constant pnnx_15583 0 1 24892 value=1 prim::ListConstruct pnnx_15584 5 1 24889 24890 24891 24892 14812 14839 Tensor.reshape Tensor.reshape_586 2 1 14834 14837 14838 $input=14834 $shape=14837 #14834=(36,64,576)f32 #14838=(36,64,3,6,32)f32 prim::Constant pnnx_15586 0 1 24893 value=0 prim::Constant pnnx_15587 0 1 24894 value=0 prim::Constant pnnx_15589 0 1 24895 value=0 prim::Constant pnnx_15590 0 1 24896 value=1 prim::Constant pnnx_15592 0 1 24897 value=0 prim::Constant pnnx_15593 0 1 24898 value=2 torch.permute torch.permute_2833 2 1 14838 14839 qkv0.83 $input=14838 $dims=14839 #14838=(36,64,3,6,32)f32 #qkv0.83=(3,36,6,64,32)f32 Tensor.select Tensor.select_878 3 1 qkv0.83 24893 24894 q.155 $input=qkv0.83 $dim=24893 $index=24894 #qkv0.83=(3,36,6,64,32)f32 #q.155=(36,6,64,32)f32 aten::mul pnnx_15595 2 1 q.155 14804 q0.83 #q.155=(36,6,64,32)f32 #q0.83=(36,6,64,32)f32 Tensor.select Tensor.select_879 3 1 qkv0.83 24895 24896 k.155 $input=qkv0.83 $dim=24895 $index=24896 #qkv0.83=(3,36,6,64,32)f32 #k.155=(36,6,64,32)f32 prim::Constant pnnx_15598 0 1 24899 value=-1 prim::ListConstruct pnnx_15599 1 1 24899 14847 Tensor.view Tensor.view_1828 2 1 relative_position_index.155 14847 14848 $input=relative_position_index.155 $shape=14847 #relative_position_index.155=(64,64)i64 #14848=(4096)i64 prim::ListConstruct pnnx_15601 1 1 14848 14849 #14848=(4096)i64 prim::Constant pnnx_15603 0 1 24900 value=64 prim::Constant pnnx_15604 
0 1 24901 value=-1 prim::ListConstruct pnnx_15605 3 1 14815 24900 24901 14851 Tensor.index Tensor.index_402 2 1 relative_position_bias_table.155 14849 14850 $input=relative_position_bias_table.155 $expr=14849 #relative_position_bias_table.155=(225,6)f32 #14850=(4096,6)f32 prim::Constant pnnx_15607 0 1 24902 value=2 prim::Constant pnnx_15608 0 1 24903 value=0 prim::Constant pnnx_15609 0 1 24904 value=1 prim::ListConstruct pnnx_15610 3 1 24902 24903 24904 14853 Tensor.view Tensor.view_1829 2 1 14850 14851 relative_position_bias.155 $input=14850 $shape=14851 #14850=(4096,6)f32 #relative_position_bias.155=(64,64,6)f32 prim::Constant pnnx_15614 0 1 24906 value=0 torch.permute torch.permute_2834 2 1 relative_position_bias.155 14853 14854 $input=relative_position_bias.155 $dims=14853 #relative_position_bias.155=(64,64,6)f32 #14854=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_231 1 1 14854 relative_position_bias0.83 memory_format=torch.contiguous_format $input=14854 #14854=(6,64,64)f32 #relative_position_bias0.83=(6,64,64)f32 prim::Constant pnnx_15616 0 1 24907 value=1 torch.transpose torch.transpose_3141 3 1 k.155 14813 14814 14845 $input=k.155 $dim0=14813 $dim1=14814 #k.155=(36,6,64,32)f32 #14845=(36,6,32,64)f32 torch.matmul torch.matmul_2356 2 1 q0.83 14845 attn.311 $input=q0.83 $other=14845 #q0.83=(36,6,64,32)f32 #14845=(36,6,32,64)f32 #attn.311=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3370 2 1 relative_position_bias0.83 24906 14856 $input=relative_position_bias0.83 $dim=24906 #relative_position_bias0.83=(6,64,64)f32 #14856=(1,6,64,64)f32 aten::add pnnx_15617 3 1 attn.311 14856 24907 input.349 #attn.311=(36,6,64,64)f32 #14856=(1,6,64,64)f32 #input.349=(36,6,64,64)f32 nn.Softmax layers_mmsa.0.residual_group.blocks.4.attn.softmax 1 1 input.349 14858 dim=-1 #input.349=(36,6,64,64)f32 #14858=(36,6,64,64)f32 nn.Dropout layers_mmsa.0.residual_group.blocks.4.attn.attn_drop 1 1 14858 14859 #14858=(36,6,64,64)f32 #14859=(36,6,64,64)f32 Tensor.select Tensor.select_880 3 1 qkv0.83 24897 24898 v.155 $input=qkv0.83 $dim=24897 $index=24898 #qkv0.83=(3,36,6,64,32)f32 #v.155=(36,6,64,32)f32 prim::Constant pnnx_15619 0 1 24908 value=1 prim::Constant pnnx_15620 0 1 24909 value=2 torch.matmul torch.matmul_2357 2 1 14859 v.155 14860 $input=14859 $other=v.155 #14859=(36,6,64,64)f32 #v.155=(36,6,64,32)f32 #14860=(36,6,64,32)f32 prim::ListConstruct pnnx_15622 3 1 14825 14829 14833 14862 torch.transpose torch.transpose_3142 3 1 14860 24908 24909 14861 $input=14860 $dim0=24908 $dim1=24909 #14860=(36,6,64,32)f32 #14861=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_587 2 1 14861 14862 input0.87 $input=14861 $shape=14862 #14861=(36,64,6,32)f32 #input0.87=(36,64,192)f32 nn.Linear layers_mmsa.0.residual_group.blocks.4.attn.proj 1 1 input0.87 14864 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.87=(36,64,192)f32 #14864=(36,64,192)f32 nn.Dropout layers_mmsa.0.residual_group.blocks.4.attn.proj_drop 1 1 14864 14865 #14864=(36,64,192)f32 #14865=(36,64,192)f32 prim::Constant pnnx_15624 0 1 24910 value=-1 prim::Constant pnnx_15625 0 1 24911 value=8 prim::Constant pnnx_15626 0 1 24912 value=8 prim::ListConstruct pnnx_15627 4 1 24910 24911 24912 14774 14866 prim::Constant pnnx_15629 0 1 24913 value=8 prim::Constant pnnx_15630 0 1 24914 value=trunc aten::div pnnx_15631 3 1 H1.1 24913 24914 14868 aten::Int pnnx_15632 1 1 14868 14869 prim::Constant pnnx_15633 0 1 24915 value=8 prim::Constant pnnx_15634 0 1 24916 value=trunc aten::div pnnx_15635 3 1 W1.1 24915 24916 14870 aten::Int 
pnnx_15636 1 1 14870 14871 prim::Constant pnnx_15637 0 1 24917 value=1 prim::Constant pnnx_15638 0 1 24918 value=8 prim::Constant pnnx_15639 0 1 24919 value=8 prim::Constant pnnx_15640 0 1 24920 value=-1 prim::ListConstruct pnnx_15641 6 1 24917 14869 14871 24918 24919 24920 14872 prim::Constant pnnx_15643 0 1 24921 value=0 prim::Constant pnnx_15644 0 1 24922 value=1 prim::Constant pnnx_15645 0 1 24923 value=3 prim::Constant pnnx_15646 0 1 24924 value=2 prim::Constant pnnx_15647 0 1 24925 value=4 prim::Constant pnnx_15648 0 1 24926 value=5 prim::ListConstruct pnnx_15649 6 1 24921 24922 24923 24924 24925 24926 14874 Tensor.view Tensor.view_1830 2 1 14865 14866 windows.155 $input=14865 $shape=14866 #14865=(36,64,192)f32 #windows.155=(36,8,8,192)f32 Tensor.view Tensor.view_1831 2 1 windows.155 14872 x2.83 $input=windows.155 $shape=14872 #windows.155=(36,8,8,192)f32 #x2.83=(1,6,6,8,8,192)f32 prim::Constant pnnx_15653 0 1 24928 value=1 prim::Constant pnnx_15654 0 1 24929 value=-1 prim::ListConstruct pnnx_15655 4 1 24928 1744 1984 24929 14877 torch.permute torch.permute_2835 2 1 x2.83 14874 14875 $input=x2.83 $dims=14874 #x2.83=(1,6,6,8,8,192)f32 #14875=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_232 1 1 14875 14876 memory_format=torch.contiguous_format $input=14875 #14875=(1,6,8,6,8,192)f32 #14876=(1,6,8,6,8,192)f32 aten::mul pnnx_15657 2 1 H1.1 W1.1 14879 aten::Int pnnx_15658 1 1 14879 14880 prim::ListConstruct pnnx_15659 3 1 14769 14880 14773 14881 prim::Constant pnnx_15661 0 1 14883 value=None prim::Constant pnnx_15662 0 1 24930 value=1 Tensor.view Tensor.view_1832 2 1 14876 14877 x3.83 $input=14876 $shape=14877 #14876=(1,6,8,6,8,192)f32 #x3.83=(1,48,48,192)f32 Tensor.view Tensor.view_1833 2 1 x3.83 14881 x4.83 $input=x3.83 $shape=14881 #x3.83=(1,48,48,192)f32 #x4.83=(1,2304,192)f32 aten::add pnnx_15663 3 1 14750 x4.83 24930 input.351 #14750=(1,2304,192)f32 #x4.83=(1,2304,192)f32 #input.351=(1,2304,192)f32 nn.LayerNorm layers_mmsa.0.residual_group.blocks.4.norm2 1 1 input.351 14885 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.351=(1,2304,192)f32 #14885=(1,2304,192)f32 nn.Linear layers_mmsa.0.residual_group.blocks.4.mlp.fc1 1 1 14885 14890 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #14885=(1,2304,192)f32 #14890=(1,2304,384)f32 nn.GELU layers_mmsa.0.residual_group.blocks.4.mlp.act 1 1 14890 14891 #14890=(1,2304,384)f32 #14891=(1,2304,384)f32 nn.Dropout layers_mmsa.0.residual_group.blocks.4.mlp.drop 1 1 14891 14892 #14891=(1,2304,384)f32 #14892=(1,2304,384)f32 nn.Linear layers_mmsa.0.residual_group.blocks.4.mlp.fc2 1 1 14892 14893 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #14892=(1,2304,384)f32 #14893=(1,2304,192)f32 nn.Dropout layers_mmsa.0.residual_group.blocks.4.mlp.drop 1 1 14893 14894 #14893=(1,2304,192)f32 #14894=(1,2304,192)f32 prim::Constant pnnx_15664 0 1 14895 value=None prim::Constant pnnx_15665 0 1 24931 value=1 aten::add pnnx_15666 3 1 input.351 14894 24931 14896 #input.351=(1,2304,192)f32 #14894=(1,2304,192)f32 #14896=(1,2304,192)f32 prim::Constant pnnx_15667 0 1 14897 value=trunc prim::Constant pnnx_15668 0 1 14898 value=8 prim::Constant pnnx_15669 0 1 14899 value=0 prim::Constant pnnx_15670 0 1 14900 value=2 prim::Constant pnnx_15671 0 1 14901 value=-4 prim::Constant pnnx_15672 0 1 14902 value=1 prim::Constant pnnx_15673 0 1 14903 value=3 prim::Constant pnnx_15674 0 1 14904 value=8 prim::Constant pnnx_15675 0 1 14905 value=4 prim::Constant 
pnnx_15676 0 1 14906 value=5 prim::Constant pnnx_15677 0 1 14907 value=-1 prim::Constant pnnx_15678 0 1 14908 value=64 pnnx.Attribute layers_mmsa.0.residual_group.blocks.5 0 1 attn_mask.79 @attn_mask=(36,64,64)f32 #attn_mask.79=(36,64,64)f32 aten::size pnnx_15679 2 1 14896 14899 14915 #14896=(1,2304,192)f32 prim::NumToTensor pnnx_15680 1 1 14915 B.187 aten::Int pnnx_15681 1 1 B.187 14917 aten::Int pnnx_15682 1 1 B.187 14918 aten::size pnnx_15683 2 1 14896 14900 14919 #14896=(1,2304,192)f32 prim::NumToTensor pnnx_15684 1 1 14919 C.319 aten::Int pnnx_15685 1 1 C.319 14921 aten::Int pnnx_15686 1 1 C.319 14922 aten::Int pnnx_15687 1 1 C.319 14923 aten::Int pnnx_15688 1 1 C.319 14924 nn.LayerNorm layers_mmsa.0.residual_group.blocks.5.norm1 1 1 14896 14925 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #14896=(1,2304,192)f32 #14925=(1,2304,192)f32 prim::ListConstruct pnnx_15689 4 1 14918 1741 1981 14924 14926 prim::Constant pnnx_15691 0 1 24932 value=-4 prim::ListConstruct pnnx_15692 2 1 14901 24932 14928 prim::Constant pnnx_15693 0 1 24933 value=2 prim::ListConstruct pnnx_15694 2 1 14902 24933 14929 Tensor.view Tensor.view_1834 2 1 14925 14926 x.157 $input=14925 $shape=14926 #14925=(1,2304,192)f32 #x.157=(1,48,48,192)f32 prim::Constant pnnx_15696 0 1 24934 value=0 torch.roll torch.roll_2496 3 1 x.157 14928 14929 x0.85 $input=x.157 $shifts=14928 $dims=14929 #x.157=(1,48,48,192)f32 #x0.85=(1,48,48,192)f32 aten::size pnnx_15697 2 1 x0.85 24934 14931 #x0.85=(1,48,48,192)f32 prim::NumToTensor pnnx_15698 1 1 14931 B0.85 aten::Int pnnx_15699 1 1 B0.85 14933 prim::Constant pnnx_15700 0 1 24935 value=1 aten::size pnnx_15701 2 1 x0.85 24935 14934 #x0.85=(1,48,48,192)f32 prim::NumToTensor pnnx_15702 1 1 14934 14935 prim::Constant pnnx_15703 0 1 24936 value=2 aten::size pnnx_15704 2 1 x0.85 24936 14936 #x0.85=(1,48,48,192)f32 prim::NumToTensor pnnx_15705 1 1 14936 14937 aten::size pnnx_15706 2 1 x0.85 14903 14938 #x0.85=(1,48,48,192)f32 prim::NumToTensor pnnx_15707 1 1 14938 C0.85 aten::Int pnnx_15708 1 1 C0.85 14940 aten::Int pnnx_15709 1 1 C0.85 14941 aten::div pnnx_15710 3 1 14935 14898 14897 14942 aten::Int pnnx_15711 1 1 14942 14943 prim::Constant pnnx_15712 0 1 24937 value=8 prim::Constant pnnx_15713 0 1 24938 value=trunc aten::div pnnx_15714 3 1 14937 24937 24938 14944 aten::Int pnnx_15715 1 1 14944 14945 prim::Constant pnnx_15716 0 1 24939 value=8 prim::ListConstruct pnnx_15717 6 1 14933 14943 14904 14945 24939 14941 14946 prim::Constant pnnx_15719 0 1 24940 value=0 prim::Constant pnnx_15720 0 1 24941 value=1 prim::Constant pnnx_15721 0 1 24942 value=3 prim::Constant pnnx_15722 0 1 24943 value=2 prim::ListConstruct pnnx_15723 6 1 24940 24941 24942 24943 14905 14906 14948 Tensor.view Tensor.view_1835 2 1 x0.85 14946 x1.85 $input=x0.85 $shape=14946 #x0.85=(1,48,48,192)f32 #x1.85=(1,6,8,6,8,192)f32 prim::Constant pnnx_15727 0 1 24945 value=8 prim::Constant pnnx_15728 0 1 24946 value=8 prim::ListConstruct pnnx_15729 4 1 14907 24945 24946 14940 14951 torch.permute torch.permute_2836 2 1 x1.85 14948 14949 $input=x1.85 $dims=14948 #x1.85=(1,6,8,6,8,192)f32 #14949=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_233 1 1 14949 14950 memory_format=torch.contiguous_format $input=14949 #14949=(1,6,6,8,8,192)f32 #14950=(1,6,6,8,8,192)f32 prim::Constant pnnx_15731 0 1 24947 value=-1 prim::ListConstruct pnnx_15732 3 1 24947 14908 14923 14953 prim::Constant pnnx_15734 0 1 14955 value=1.767767e-01 prim::Constant pnnx_15735 0 1 14956 value=trunc prim::Constant 
pnnx_15736 0 1 14957 value=6 prim::Constant pnnx_15737 0 1 14958 value=0 prim::Constant pnnx_15738 0 1 14959 value=1 prim::Constant pnnx_15739 0 1 14960 value=2 prim::Constant pnnx_15740 0 1 14961 value=3 prim::Constant pnnx_15741 0 1 14962 value=6 prim::Constant pnnx_15742 0 1 14963 value=4 prim::Constant pnnx_15743 0 1 14964 value=-2 prim::Constant pnnx_15744 0 1 14965 value=-1 prim::Constant pnnx_15745 0 1 14966 value=64 pnnx.Attribute layers_mmsa.0.residual_group.blocks.5.attn 0 1 relative_position_bias_table.157 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.157=(225,6)f32 pnnx.Attribute layers_mmsa.0.residual_group.blocks.5.attn 0 1 relative_position_index.157 @relative_position_index=(64,64)i64 #relative_position_index.157=(64,64)i64 Tensor.view Tensor.view_1836 2 1 14950 14951 x_windows.157 $input=14950 $shape=14951 #14950=(1,6,6,8,8,192)f32 #x_windows.157=(36,8,8,192)f32 Tensor.view Tensor.view_1837 2 1 x_windows.157 14953 x2.85 $input=x_windows.157 $shape=14953 #x_windows.157=(36,8,8,192)f32 #x2.85=(36,64,192)f32 aten::size pnnx_15746 2 1 x2.85 14958 14974 #x2.85=(36,64,192)f32 prim::NumToTensor pnnx_15747 1 1 14974 B_.157 aten::Int pnnx_15748 1 1 B_.157 14976 aten::Int pnnx_15749 1 1 B_.157 14977 aten::size pnnx_15750 2 1 x2.85 14959 14978 #x2.85=(36,64,192)f32 prim::NumToTensor pnnx_15751 1 1 14978 N.157 aten::Int pnnx_15752 1 1 N.157 14980 aten::Int pnnx_15753 1 1 N.157 14981 aten::Int pnnx_15754 1 1 N.157 14982 aten::Int pnnx_15755 1 1 N.157 14983 aten::Int pnnx_15756 1 1 N.157 14984 aten::Int pnnx_15757 1 1 N.157 14985 aten::size pnnx_15758 2 1 x2.85 14960 14986 #x2.85=(36,64,192)f32 prim::NumToTensor pnnx_15759 1 1 14986 C.321 aten::Int pnnx_15760 1 1 C.321 14988 nn.Linear layers_mmsa.0.residual_group.blocks.5.attn.qkv 1 1 x2.85 14989 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.85=(36,64,192)f32 #14989=(36,64,576)f32 aten::div pnnx_15761 3 1 C.321 14957 14956 14990 aten::Int pnnx_15762 1 1 14990 14991 prim::ListConstruct pnnx_15763 5 1 14977 14985 14961 14962 14991 14992 prim::Constant pnnx_15765 0 1 24948 value=2 prim::Constant pnnx_15766 0 1 24949 value=0 prim::Constant pnnx_15767 0 1 24950 value=3 prim::Constant pnnx_15768 0 1 24951 value=1 prim::ListConstruct pnnx_15769 5 1 24948 24949 24950 24951 14963 14994 Tensor.reshape Tensor.reshape_588 2 1 14989 14992 14993 $input=14989 $shape=14992 #14989=(36,64,576)f32 #14993=(36,64,3,6,32)f32 prim::Constant pnnx_15771 0 1 24952 value=0 prim::Constant pnnx_15772 0 1 24953 value=0 prim::Constant pnnx_15774 0 1 24954 value=0 prim::Constant pnnx_15775 0 1 24955 value=1 prim::Constant pnnx_15777 0 1 24956 value=0 prim::Constant pnnx_15778 0 1 24957 value=2 torch.permute torch.permute_2837 2 1 14993 14994 qkv0.85 $input=14993 $dims=14994 #14993=(36,64,3,6,32)f32 #qkv0.85=(3,36,6,64,32)f32 Tensor.select Tensor.select_881 3 1 qkv0.85 24952 24953 q.157 $input=qkv0.85 $dim=24952 $index=24953 #qkv0.85=(3,36,6,64,32)f32 #q.157=(36,6,64,32)f32 aten::mul pnnx_15780 2 1 q.157 14955 q0.85 #q.157=(36,6,64,32)f32 #q0.85=(36,6,64,32)f32 Tensor.select Tensor.select_882 3 1 qkv0.85 24954 24955 k.157 $input=qkv0.85 $dim=24954 $index=24955 #qkv0.85=(3,36,6,64,32)f32 #k.157=(36,6,64,32)f32 prim::Constant pnnx_15783 0 1 24958 value=-1 prim::ListConstruct pnnx_15784 1 1 24958 15002 Tensor.view Tensor.view_1838 2 1 relative_position_index.157 15002 15003 $input=relative_position_index.157 $shape=15002 #relative_position_index.157=(64,64)i64 #15003=(4096)i64 prim::ListConstruct pnnx_15786 1 1 
15003 15004 #15003=(4096)i64 prim::Constant pnnx_15788 0 1 24959 value=64 prim::Constant pnnx_15789 0 1 24960 value=-1 prim::ListConstruct pnnx_15790 3 1 14966 24959 24960 15006 Tensor.index Tensor.index_403 2 1 relative_position_bias_table.157 15004 15005 $input=relative_position_bias_table.157 $expr=15004 #relative_position_bias_table.157=(225,6)f32 #15005=(4096,6)f32 prim::Constant pnnx_15792 0 1 24961 value=2 prim::Constant pnnx_15793 0 1 24962 value=0 prim::Constant pnnx_15794 0 1 24963 value=1 prim::ListConstruct pnnx_15795 3 1 24961 24962 24963 15008 Tensor.view Tensor.view_1839 2 1 15005 15006 relative_position_bias.157 $input=15005 $shape=15006 #15005=(4096,6)f32 #relative_position_bias.157=(64,64,6)f32 prim::Constant pnnx_15799 0 1 24965 value=0 torch.permute torch.permute_2838 2 1 relative_position_bias.157 15008 15009 $input=relative_position_bias.157 $dims=15008 #relative_position_bias.157=(64,64,6)f32 #15009=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_234 1 1 15009 relative_position_bias0.85 memory_format=torch.contiguous_format $input=15009 #15009=(6,64,64)f32 #relative_position_bias0.85=(6,64,64)f32 prim::Constant pnnx_15801 0 1 24966 value=1 torch.transpose torch.transpose_3143 3 1 k.157 14964 14965 15000 $input=k.157 $dim0=14964 $dim1=14965 #k.157=(36,6,64,32)f32 #15000=(36,6,32,64)f32 torch.matmul torch.matmul_2358 2 1 q0.85 15000 attn.315 $input=q0.85 $other=15000 #q0.85=(36,6,64,32)f32 #15000=(36,6,32,64)f32 #attn.315=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3371 2 1 relative_position_bias0.85 24965 15011 $input=relative_position_bias0.85 $dim=24965 #relative_position_bias0.85=(6,64,64)f32 #15011=(1,6,64,64)f32 aten::add pnnx_15802 3 1 attn.315 15011 24966 attn0.43 #attn.315=(36,6,64,64)f32 #15011=(1,6,64,64)f32 #attn0.43=(36,6,64,64)f32 prim::Constant pnnx_15803 0 1 24967 value=0 aten::size pnnx_15804 2 1 attn_mask.79 24967 15013 #attn_mask.79=(36,64,64)f32 prim::NumToTensor pnnx_15805 1 1 15013 other.79 aten::Int pnnx_15806 1 1 other.79 15015 prim::Constant pnnx_15807 0 1 24968 value=trunc aten::div pnnx_15808 3 1 B_.157 other.79 24968 15016 aten::Int pnnx_15809 1 1 15016 15017 prim::Constant pnnx_15810 0 1 24969 value=6 prim::ListConstruct pnnx_15811 5 1 15017 15015 24969 14984 14983 15018 prim::Constant pnnx_15813 0 1 24970 value=1 prim::Constant pnnx_15815 0 1 24971 value=0 prim::Constant pnnx_15817 0 1 24972 value=1 Tensor.view Tensor.view_1840 2 1 attn0.43 15018 15019 $input=attn0.43 $shape=15018 #attn0.43=(36,6,64,64)f32 #15019=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3372 2 1 attn_mask.79 24970 15020 $input=attn_mask.79 $dim=24970 #attn_mask.79=(36,64,64)f32 #15020=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3373 2 1 15020 24971 15021 $input=15020 $dim=24971 #15020=(36,1,64,64)f32 #15021=(1,36,1,64,64)f32 aten::add pnnx_15818 3 1 15019 15021 24972 attn1.43 #15019=(1,36,6,64,64)f32 #15021=(1,36,1,64,64)f32 #attn1.43=(1,36,6,64,64)f32 prim::Constant pnnx_15819 0 1 24973 value=-1 prim::Constant pnnx_15820 0 1 24974 value=6 prim::ListConstruct pnnx_15821 4 1 24973 24974 14982 14981 15023 Tensor.view Tensor.view_1841 2 1 attn1.43 15023 input.353 $input=attn1.43 $shape=15023 #attn1.43=(1,36,6,64,64)f32 #input.353=(36,6,64,64)f32 nn.Softmax layers_mmsa.0.residual_group.blocks.5.attn.softmax 1 1 input.353 15025 dim=-1 #input.353=(36,6,64,64)f32 #15025=(36,6,64,64)f32 nn.Dropout layers_mmsa.0.residual_group.blocks.5.attn.attn_drop 1 1 15025 15026 #15025=(36,6,64,64)f32 #15026=(36,6,64,64)f32 Tensor.select Tensor.select_883 3 1 qkv0.85 24956 
24957 v.157 $input=qkv0.85 $dim=24956 $index=24957 #qkv0.85=(3,36,6,64,32)f32 #v.157=(36,6,64,32)f32 prim::Constant pnnx_15824 0 1 24975 value=1 prim::Constant pnnx_15825 0 1 24976 value=2 torch.matmul torch.matmul_2359 2 1 15026 v.157 15027 $input=15026 $other=v.157 #15026=(36,6,64,64)f32 #v.157=(36,6,64,32)f32 #15027=(36,6,64,32)f32 prim::ListConstruct pnnx_15827 3 1 14976 14980 14988 15029 torch.transpose torch.transpose_3144 3 1 15027 24975 24976 15028 $input=15027 $dim0=24975 $dim1=24976 #15027=(36,6,64,32)f32 #15028=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_589 2 1 15028 15029 input0.89 $input=15028 $shape=15029 #15028=(36,64,6,32)f32 #input0.89=(36,64,192)f32 nn.Linear layers_mmsa.0.residual_group.blocks.5.attn.proj 1 1 input0.89 15031 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.89=(36,64,192)f32 #15031=(36,64,192)f32 nn.Dropout layers_mmsa.0.residual_group.blocks.5.attn.proj_drop 1 1 15031 15032 #15031=(36,64,192)f32 #15032=(36,64,192)f32 prim::Constant pnnx_15829 0 1 24977 value=-1 prim::Constant pnnx_15830 0 1 24978 value=8 prim::Constant pnnx_15831 0 1 24979 value=8 prim::ListConstruct pnnx_15832 4 1 24977 24978 24979 14922 15033 prim::Constant pnnx_15834 0 1 24980 value=8 prim::Constant pnnx_15835 0 1 24981 value=trunc aten::div pnnx_15836 3 1 H1.1 24980 24981 15035 aten::Int pnnx_15837 1 1 15035 15036 prim::Constant pnnx_15838 0 1 24982 value=8 prim::Constant pnnx_15839 0 1 24983 value=trunc aten::div pnnx_15840 3 1 W1.1 24982 24983 15037 aten::Int pnnx_15841 1 1 15037 15038 prim::Constant pnnx_15842 0 1 24984 value=1 prim::Constant pnnx_15843 0 1 24985 value=8 prim::Constant pnnx_15844 0 1 24986 value=8 prim::Constant pnnx_15845 0 1 24987 value=-1 prim::ListConstruct pnnx_15846 6 1 24984 15036 15038 24985 24986 24987 15039 prim::Constant pnnx_15848 0 1 24988 value=0 prim::Constant pnnx_15849 0 1 24989 value=1 prim::Constant pnnx_15850 0 1 24990 value=3 prim::Constant pnnx_15851 0 1 24991 value=2 prim::Constant pnnx_15852 0 1 24992 value=4 prim::Constant pnnx_15853 0 1 24993 value=5 prim::ListConstruct pnnx_15854 6 1 24988 24989 24990 24991 24992 24993 15041 Tensor.view Tensor.view_1842 2 1 15032 15033 windows.157 $input=15032 $shape=15033 #15032=(36,64,192)f32 #windows.157=(36,8,8,192)f32 Tensor.view Tensor.view_1843 2 1 windows.157 15039 x3.85 $input=windows.157 $shape=15039 #windows.157=(36,8,8,192)f32 #x3.85=(1,6,6,8,8,192)f32 prim::Constant pnnx_15858 0 1 24995 value=1 prim::Constant pnnx_15859 0 1 24996 value=-1 prim::ListConstruct pnnx_15860 4 1 24995 1738 1978 24996 15044 torch.permute torch.permute_2839 2 1 x3.85 15041 15042 $input=x3.85 $dims=15041 #x3.85=(1,6,6,8,8,192)f32 #15042=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_235 1 1 15042 15043 memory_format=torch.contiguous_format $input=15042 #15042=(1,6,8,6,8,192)f32 #15043=(1,6,8,6,8,192)f32 prim::Constant pnnx_15862 0 1 24997 value=4 prim::Constant pnnx_15863 0 1 24998 value=4 prim::ListConstruct pnnx_15864 2 1 24997 24998 15046 prim::Constant pnnx_15865 0 1 24999 value=1 prim::Constant pnnx_15866 0 1 25000 value=2 prim::ListConstruct pnnx_15867 2 1 24999 25000 15047 Tensor.view Tensor.view_1844 2 1 15043 15044 shifted_x.79 $input=15043 $shape=15044 #15043=(1,6,8,6,8,192)f32 #shifted_x.79=(1,48,48,192)f32 aten::mul pnnx_15869 2 1 H1.1 W1.1 15049 aten::Int pnnx_15870 1 1 15049 15050 prim::ListConstruct pnnx_15871 3 1 14917 15050 14921 15051 prim::Constant pnnx_15873 0 1 15053 value=None prim::Constant pnnx_15874 0 1 25001 value=1 torch.roll torch.roll_2497 
3 1 shifted_x.79 15046 15047 x4.85 $input=shifted_x.79 $shifts=15046 $dims=15047 #shifted_x.79=(1,48,48,192)f32 #x4.85=(1,48,48,192)f32 Tensor.view Tensor.view_1845 2 1 x4.85 15051 x5.79 $input=x4.85 $shape=15051 #x4.85=(1,48,48,192)f32 #x5.79=(1,2304,192)f32 aten::add pnnx_15875 3 1 14896 x5.79 25001 input.355 #14896=(1,2304,192)f32 #x5.79=(1,2304,192)f32 #input.355=(1,2304,192)f32 nn.LayerNorm layers_mmsa.0.residual_group.blocks.5.norm2 1 1 input.355 15055 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.355=(1,2304,192)f32 #15055=(1,2304,192)f32 nn.Linear layers_mmsa.0.residual_group.blocks.5.mlp.fc1 1 1 15055 15060 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #15055=(1,2304,192)f32 #15060=(1,2304,384)f32 nn.GELU layers_mmsa.0.residual_group.blocks.5.mlp.act 1 1 15060 15061 #15060=(1,2304,384)f32 #15061=(1,2304,384)f32 nn.Dropout layers_mmsa.0.residual_group.blocks.5.mlp.drop 1 1 15061 15062 #15061=(1,2304,384)f32 #15062=(1,2304,384)f32 nn.Linear layers_mmsa.0.residual_group.blocks.5.mlp.fc2 1 1 15062 15063 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #15062=(1,2304,384)f32 #15063=(1,2304,192)f32 nn.Dropout layers_mmsa.0.residual_group.blocks.5.mlp.drop 1 1 15063 15064 #15063=(1,2304,192)f32 #15064=(1,2304,192)f32 prim::Constant pnnx_15876 0 1 15065 value=None prim::Constant pnnx_15877 0 1 25002 value=1 aten::add pnnx_15878 3 1 input.355 15064 25002 15066 #input.355=(1,2304,192)f32 #15064=(1,2304,192)f32 #15066=(1,2304,192)f32 prim::Constant pnnx_15879 0 1 15067 value=0 prim::Constant pnnx_15880 0 1 15068 value=1 prim::Constant pnnx_15881 0 1 15069 value=2 prim::Constant pnnx_15882 0 1 15070 value=192 aten::size pnnx_15883 2 1 15066 15067 15071 #15066=(1,2304,192)f32 prim::NumToTensor pnnx_15884 1 1 15071 B.189 aten::Int pnnx_15885 1 1 B.189 15073 prim::ListConstruct pnnx_15887 4 1 15073 15070 1735 1975 15075 torch.transpose torch.transpose_3145 3 1 15066 15068 15069 15074 $input=15066 $dim0=15068 $dim1=15069 #15066=(1,2304,192)f32 #15074=(1,192,2304)f32 Tensor.view Tensor.view_1846 2 1 15074 15075 input.357 $input=15074 $shape=15075 #15074=(1,192,2304)f32 #input.357=(1,192,48,48)f32 nn.Conv2d layers_mmsa.0.conv 1 1 input.357 15077 bias=True dilation=(1,1) groups=1 in_channels=192 kernel_size=(3,3) out_channels=192 padding=(1,1) padding_mode=zeros stride=(1,1) @bias=(192)f32 @weight=(192,192,3,3)f32 #input.357=(1,192,48,48)f32 #15077=(1,192,48,48)f32 prim::Constant pnnx_15889 0 1 15078 value=-1 prim::Constant pnnx_15890 0 1 15079 value=2 prim::Constant pnnx_15891 0 1 15080 value=1 prim::Constant pnnx_15893 0 1 25003 value=2 torch.flatten torch.flatten_2198 3 1 15077 15079 15078 15081 $input=15077 $start_dim=15079 $end_dim=15078 #15077=(1,192,48,48)f32 #15081=(1,192,2304)f32 torch.transpose torch.transpose_3146 3 1 15081 15080 25003 15082 $input=15081 $dim0=15080 $dim1=25003 #15081=(1,192,2304)f32 #15082=(1,2304,192)f32 aten::add pnnx_15895 3 1 15082 2016 14102 15083 #15082=(1,2304,192)f32 #2016=(1,2304,192)f32 #15083=(1,2304,192)f32 prim::Constant pnnx_15896 0 1 15084 value=1 prim::Constant pnnx_15897 0 1 15101 value=trunc prim::Constant pnnx_15898 0 1 15102 value=8 prim::Constant pnnx_15899 0 1 15103 value=0 prim::Constant pnnx_15900 0 1 15104 value=2 prim::Constant pnnx_15901 0 1 15105 value=1 prim::Constant pnnx_15902 0 1 15106 value=3 prim::Constant pnnx_15903 0 1 15107 value=8 prim::Constant pnnx_15904 0 1 15108 value=4 prim::Constant pnnx_15905 0 1 15109 
value=5 prim::Constant pnnx_15906 0 1 15110 value=-1 prim::Constant pnnx_15907 0 1 15111 value=64 aten::size pnnx_15908 2 1 15083 15103 15117 #15083=(1,2304,192)f32 prim::NumToTensor pnnx_15909 1 1 15117 B.191 aten::Int pnnx_15910 1 1 B.191 15119 aten::Int pnnx_15911 1 1 B.191 15120 aten::size pnnx_15912 2 1 15083 15104 15121 #15083=(1,2304,192)f32 prim::NumToTensor pnnx_15913 1 1 15121 C.323 aten::Int pnnx_15914 1 1 C.323 15123 aten::Int pnnx_15915 1 1 C.323 15124 aten::Int pnnx_15916 1 1 C.323 15125 aten::Int pnnx_15917 1 1 C.323 15126 nn.LayerNorm layers_mmsa.1.residual_group.blocks.0.norm1 1 1 15083 15127 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #15083=(1,2304,192)f32 #15127=(1,2304,192)f32 prim::ListConstruct pnnx_15918 4 1 15120 1732 1972 15126 15128 prim::Constant pnnx_15920 0 1 25004 value=0 Tensor.view Tensor.view_1847 2 1 15127 15128 x.159 $input=15127 $shape=15128 #15127=(1,2304,192)f32 #x.159=(1,48,48,192)f32 aten::size pnnx_15921 2 1 x.159 25004 15130 #x.159=(1,48,48,192)f32 prim::NumToTensor pnnx_15922 1 1 15130 B0.87 aten::Int pnnx_15923 1 1 B0.87 15132 aten::size pnnx_15924 2 1 x.159 15105 15133 #x.159=(1,48,48,192)f32 prim::NumToTensor pnnx_15925 1 1 15133 15134 prim::Constant pnnx_15926 0 1 25005 value=2 aten::size pnnx_15927 2 1 x.159 25005 15135 #x.159=(1,48,48,192)f32 prim::NumToTensor pnnx_15928 1 1 15135 15136 aten::size pnnx_15929 2 1 x.159 15106 15137 #x.159=(1,48,48,192)f32 prim::NumToTensor pnnx_15930 1 1 15137 C0.87 aten::Int pnnx_15931 1 1 C0.87 15139 aten::Int pnnx_15932 1 1 C0.87 15140 aten::div pnnx_15933 3 1 15134 15102 15101 15141 aten::Int pnnx_15934 1 1 15141 15142 prim::Constant pnnx_15935 0 1 25006 value=8 prim::Constant pnnx_15936 0 1 25007 value=trunc aten::div pnnx_15937 3 1 15136 25006 25007 15143 aten::Int pnnx_15938 1 1 15143 15144 prim::Constant pnnx_15939 0 1 25008 value=8 prim::ListConstruct pnnx_15940 6 1 15132 15142 15107 15144 25008 15140 15145 prim::Constant pnnx_15942 0 1 25009 value=0 prim::Constant pnnx_15943 0 1 25010 value=1 prim::Constant pnnx_15944 0 1 25011 value=3 prim::Constant pnnx_15945 0 1 25012 value=2 prim::ListConstruct pnnx_15946 6 1 25009 25010 25011 25012 15108 15109 15147 Tensor.view Tensor.view_1848 2 1 x.159 15145 x0.87 $input=x.159 $shape=15145 #x.159=(1,48,48,192)f32 #x0.87=(1,6,8,6,8,192)f32 prim::Constant pnnx_15950 0 1 25014 value=8 prim::Constant pnnx_15951 0 1 25015 value=8 prim::ListConstruct pnnx_15952 4 1 15110 25014 25015 15139 15150 torch.permute torch.permute_2840 2 1 x0.87 15147 15148 $input=x0.87 $dims=15147 #x0.87=(1,6,8,6,8,192)f32 #15148=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_236 1 1 15148 15149 memory_format=torch.contiguous_format $input=15148 #15148=(1,6,6,8,8,192)f32 #15149=(1,6,6,8,8,192)f32 prim::Constant pnnx_15954 0 1 25016 value=-1 prim::ListConstruct pnnx_15955 3 1 25016 15111 15125 15152 prim::Constant pnnx_15957 0 1 15154 value=1.767767e-01 prim::Constant pnnx_15958 0 1 15155 value=trunc prim::Constant pnnx_15959 0 1 15156 value=6 prim::Constant pnnx_15960 0 1 15157 value=0 prim::Constant pnnx_15961 0 1 15158 value=1 prim::Constant pnnx_15962 0 1 15159 value=2 prim::Constant pnnx_15963 0 1 15160 value=3 prim::Constant pnnx_15964 0 1 15161 value=6 prim::Constant pnnx_15965 0 1 15162 value=4 prim::Constant pnnx_15966 0 1 15163 value=-2 prim::Constant pnnx_15967 0 1 15164 value=-1 prim::Constant pnnx_15968 0 1 15165 value=64 pnnx.Attribute layers_mmsa.1.residual_group.blocks.0.attn 0 1 relative_position_bias_table.159 
@relative_position_bias_table=(225,6)f32 #relative_position_bias_table.159=(225,6)f32 pnnx.Attribute layers_mmsa.1.residual_group.blocks.0.attn 0 1 relative_position_index.159 @relative_position_index=(64,64)i64 #relative_position_index.159=(64,64)i64 Tensor.view Tensor.view_1849 2 1 15149 15150 x_windows.159 $input=15149 $shape=15150 #15149=(1,6,6,8,8,192)f32 #x_windows.159=(36,8,8,192)f32 Tensor.view Tensor.view_1850 2 1 x_windows.159 15152 x1.87 $input=x_windows.159 $shape=15152 #x_windows.159=(36,8,8,192)f32 #x1.87=(36,64,192)f32 aten::size pnnx_15969 2 1 x1.87 15157 15173 #x1.87=(36,64,192)f32 prim::NumToTensor pnnx_15970 1 1 15173 B_.159 aten::Int pnnx_15971 1 1 B_.159 15175 aten::Int pnnx_15972 1 1 B_.159 15176 aten::size pnnx_15973 2 1 x1.87 15158 15177 #x1.87=(36,64,192)f32 prim::NumToTensor pnnx_15974 1 1 15177 N.159 aten::Int pnnx_15975 1 1 N.159 15179 aten::Int pnnx_15976 1 1 N.159 15180 aten::size pnnx_15977 2 1 x1.87 15159 15181 #x1.87=(36,64,192)f32 prim::NumToTensor pnnx_15978 1 1 15181 C.325 aten::Int pnnx_15979 1 1 C.325 15183 nn.Linear layers_mmsa.1.residual_group.blocks.0.attn.qkv 1 1 x1.87 15184 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.87=(36,64,192)f32 #15184=(36,64,576)f32 aten::div pnnx_15980 3 1 C.325 15156 15155 15185 aten::Int pnnx_15981 1 1 15185 15186 prim::ListConstruct pnnx_15982 5 1 15176 15180 15160 15161 15186 15187 prim::Constant pnnx_15984 0 1 25017 value=2 prim::Constant pnnx_15985 0 1 25018 value=0 prim::Constant pnnx_15986 0 1 25019 value=3 prim::Constant pnnx_15987 0 1 25020 value=1 prim::ListConstruct pnnx_15988 5 1 25017 25018 25019 25020 15162 15189 Tensor.reshape Tensor.reshape_590 2 1 15184 15187 15188 $input=15184 $shape=15187 #15184=(36,64,576)f32 #15188=(36,64,3,6,32)f32 prim::Constant pnnx_15990 0 1 25021 value=0 prim::Constant pnnx_15991 0 1 25022 value=0 prim::Constant pnnx_15993 0 1 25023 value=0 prim::Constant pnnx_15994 0 1 25024 value=1 prim::Constant pnnx_15996 0 1 25025 value=0 prim::Constant pnnx_15997 0 1 25026 value=2 torch.permute torch.permute_2841 2 1 15188 15189 qkv0.87 $input=15188 $dims=15189 #15188=(36,64,3,6,32)f32 #qkv0.87=(3,36,6,64,32)f32 Tensor.select Tensor.select_884 3 1 qkv0.87 25021 25022 q.159 $input=qkv0.87 $dim=25021 $index=25022 #qkv0.87=(3,36,6,64,32)f32 #q.159=(36,6,64,32)f32 aten::mul pnnx_15999 2 1 q.159 15154 q0.87 #q.159=(36,6,64,32)f32 #q0.87=(36,6,64,32)f32 Tensor.select Tensor.select_885 3 1 qkv0.87 25023 25024 k.159 $input=qkv0.87 $dim=25023 $index=25024 #qkv0.87=(3,36,6,64,32)f32 #k.159=(36,6,64,32)f32 prim::Constant pnnx_16002 0 1 25027 value=-1 prim::ListConstruct pnnx_16003 1 1 25027 15197 Tensor.view Tensor.view_1851 2 1 relative_position_index.159 15197 15198 $input=relative_position_index.159 $shape=15197 #relative_position_index.159=(64,64)i64 #15198=(4096)i64 prim::ListConstruct pnnx_16005 1 1 15198 15199 #15198=(4096)i64 prim::Constant pnnx_16007 0 1 25028 value=64 prim::Constant pnnx_16008 0 1 25029 value=-1 prim::ListConstruct pnnx_16009 3 1 15165 25028 25029 15201 Tensor.index Tensor.index_404 2 1 relative_position_bias_table.159 15199 15200 $input=relative_position_bias_table.159 $expr=15199 #relative_position_bias_table.159=(225,6)f32 #15200=(4096,6)f32 prim::Constant pnnx_16011 0 1 25030 value=2 prim::Constant pnnx_16012 0 1 25031 value=0 prim::Constant pnnx_16013 0 1 25032 value=1 prim::ListConstruct pnnx_16014 3 1 25030 25031 25032 15203 Tensor.view Tensor.view_1852 2 1 15200 15201 relative_position_bias.159 $input=15200 $shape=15201 
#15200=(4096,6)f32 #relative_position_bias.159=(64,64,6)f32 prim::Constant pnnx_16018 0 1 25034 value=0 torch.permute torch.permute_2842 2 1 relative_position_bias.159 15203 15204 $input=relative_position_bias.159 $dims=15203 #relative_position_bias.159=(64,64,6)f32 #15204=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_237 1 1 15204 relative_position_bias0.87 memory_format=torch.contiguous_format $input=15204 #15204=(6,64,64)f32 #relative_position_bias0.87=(6,64,64)f32 prim::Constant pnnx_16020 0 1 25035 value=1 torch.transpose torch.transpose_3147 3 1 k.159 15163 15164 15195 $input=k.159 $dim0=15163 $dim1=15164 #k.159=(36,6,64,32)f32 #15195=(36,6,32,64)f32 torch.matmul torch.matmul_2360 2 1 q0.87 15195 attn.319 $input=q0.87 $other=15195 #q0.87=(36,6,64,32)f32 #15195=(36,6,32,64)f32 #attn.319=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3374 2 1 relative_position_bias0.87 25034 15206 $input=relative_position_bias0.87 $dim=25034 #relative_position_bias0.87=(6,64,64)f32 #15206=(1,6,64,64)f32 aten::add pnnx_16021 3 1 attn.319 15206 25035 input.359 #attn.319=(36,6,64,64)f32 #15206=(1,6,64,64)f32 #input.359=(36,6,64,64)f32 nn.Softmax layers_mmsa.1.residual_group.blocks.0.attn.softmax 1 1 input.359 15208 dim=-1 #input.359=(36,6,64,64)f32 #15208=(36,6,64,64)f32 nn.Dropout layers_mmsa.1.residual_group.blocks.0.attn.attn_drop 1 1 15208 15209 #15208=(36,6,64,64)f32 #15209=(36,6,64,64)f32 Tensor.select Tensor.select_886 3 1 qkv0.87 25025 25026 v.159 $input=qkv0.87 $dim=25025 $index=25026 #qkv0.87=(3,36,6,64,32)f32 #v.159=(36,6,64,32)f32 prim::Constant pnnx_16023 0 1 25036 value=1 prim::Constant pnnx_16024 0 1 25037 value=2 torch.matmul torch.matmul_2361 2 1 15209 v.159 15210 $input=15209 $other=v.159 #15209=(36,6,64,64)f32 #v.159=(36,6,64,32)f32 #15210=(36,6,64,32)f32 prim::ListConstruct pnnx_16026 3 1 15175 15179 15183 15212 torch.transpose torch.transpose_3148 3 1 15210 25036 25037 15211 $input=15210 $dim0=25036 $dim1=25037 #15210=(36,6,64,32)f32 #15211=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_591 2 1 15211 15212 input0.91 $input=15211 $shape=15212 #15211=(36,64,6,32)f32 #input0.91=(36,64,192)f32 nn.Linear layers_mmsa.1.residual_group.blocks.0.attn.proj 1 1 input0.91 15214 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.91=(36,64,192)f32 #15214=(36,64,192)f32 nn.Dropout layers_mmsa.1.residual_group.blocks.0.attn.proj_drop 1 1 15214 15215 #15214=(36,64,192)f32 #15215=(36,64,192)f32 prim::Constant pnnx_16028 0 1 25038 value=-1 prim::Constant pnnx_16029 0 1 25039 value=8 prim::Constant pnnx_16030 0 1 25040 value=8 prim::ListConstruct pnnx_16031 4 1 25038 25039 25040 15124 15216 prim::Constant pnnx_16033 0 1 25041 value=8 prim::Constant pnnx_16034 0 1 25042 value=trunc aten::div pnnx_16035 3 1 H1.1 25041 25042 15218 aten::Int pnnx_16036 1 1 15218 15219 prim::Constant pnnx_16037 0 1 25043 value=8 prim::Constant pnnx_16038 0 1 25044 value=trunc aten::div pnnx_16039 3 1 W1.1 25043 25044 15220 aten::Int pnnx_16040 1 1 15220 15221 prim::Constant pnnx_16041 0 1 25045 value=1 prim::Constant pnnx_16042 0 1 25046 value=8 prim::Constant pnnx_16043 0 1 25047 value=8 prim::Constant pnnx_16044 0 1 25048 value=-1 prim::ListConstruct pnnx_16045 6 1 25045 15219 15221 25046 25047 25048 15222 prim::Constant pnnx_16047 0 1 25049 value=0 prim::Constant pnnx_16048 0 1 25050 value=1 prim::Constant pnnx_16049 0 1 25051 value=3 prim::Constant pnnx_16050 0 1 25052 value=2 prim::Constant pnnx_16051 0 1 25053 value=4 prim::Constant pnnx_16052 0 1 25054 value=5 prim::ListConstruct 
pnnx_16053 6 1 25049 25050 25051 25052 25053 25054 15224 Tensor.view Tensor.view_1853 2 1 15215 15216 windows.159 $input=15215 $shape=15216 #15215=(36,64,192)f32 #windows.159=(36,8,8,192)f32 Tensor.view Tensor.view_1854 2 1 windows.159 15222 x2.87 $input=windows.159 $shape=15222 #windows.159=(36,8,8,192)f32 #x2.87=(1,6,6,8,8,192)f32 prim::Constant pnnx_16057 0 1 25056 value=1 prim::Constant pnnx_16058 0 1 25057 value=-1 prim::ListConstruct pnnx_16059 4 1 25056 1729 1969 25057 15227 torch.permute torch.permute_2843 2 1 x2.87 15224 15225 $input=x2.87 $dims=15224 #x2.87=(1,6,6,8,8,192)f32 #15225=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_238 1 1 15225 15226 memory_format=torch.contiguous_format $input=15225 #15225=(1,6,8,6,8,192)f32 #15226=(1,6,8,6,8,192)f32 aten::mul pnnx_16061 2 1 H1.1 W1.1 15229 aten::Int pnnx_16062 1 1 15229 15230 prim::ListConstruct pnnx_16063 3 1 15119 15230 15123 15231 prim::Constant pnnx_16065 0 1 15233 value=None prim::Constant pnnx_16066 0 1 25058 value=1 Tensor.view Tensor.view_1855 2 1 15226 15227 x3.87 $input=15226 $shape=15227 #15226=(1,6,8,6,8,192)f32 #x3.87=(1,48,48,192)f32 Tensor.view Tensor.view_1856 2 1 x3.87 15231 x4.87 $input=x3.87 $shape=15231 #x3.87=(1,48,48,192)f32 #x4.87=(1,2304,192)f32 aten::add pnnx_16067 3 1 15083 x4.87 25058 input.361 #15083=(1,2304,192)f32 #x4.87=(1,2304,192)f32 #input.361=(1,2304,192)f32 nn.LayerNorm layers_mmsa.1.residual_group.blocks.0.norm2 1 1 input.361 15235 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.361=(1,2304,192)f32 #15235=(1,2304,192)f32 nn.Linear layers_mmsa.1.residual_group.blocks.0.mlp.fc1 1 1 15235 15240 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #15235=(1,2304,192)f32 #15240=(1,2304,384)f32 nn.GELU layers_mmsa.1.residual_group.blocks.0.mlp.act 1 1 15240 15241 #15240=(1,2304,384)f32 #15241=(1,2304,384)f32 nn.Dropout layers_mmsa.1.residual_group.blocks.0.mlp.drop 1 1 15241 15242 #15241=(1,2304,384)f32 #15242=(1,2304,384)f32 nn.Linear layers_mmsa.1.residual_group.blocks.0.mlp.fc2 1 1 15242 15243 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #15242=(1,2304,384)f32 #15243=(1,2304,192)f32 nn.Dropout layers_mmsa.1.residual_group.blocks.0.mlp.drop 1 1 15243 15244 #15243=(1,2304,192)f32 #15244=(1,2304,192)f32 prim::Constant pnnx_16068 0 1 15245 value=None prim::Constant pnnx_16069 0 1 25059 value=1 aten::add pnnx_16070 3 1 input.361 15244 25059 15246 #input.361=(1,2304,192)f32 #15244=(1,2304,192)f32 #15246=(1,2304,192)f32 prim::Constant pnnx_16071 0 1 15247 value=trunc prim::Constant pnnx_16072 0 1 15248 value=8 prim::Constant pnnx_16073 0 1 15249 value=0 prim::Constant pnnx_16074 0 1 15250 value=2 prim::Constant pnnx_16075 0 1 15251 value=-4 prim::Constant pnnx_16076 0 1 15252 value=1 prim::Constant pnnx_16077 0 1 15253 value=3 prim::Constant pnnx_16078 0 1 15254 value=8 prim::Constant pnnx_16079 0 1 15255 value=4 prim::Constant pnnx_16080 0 1 15256 value=5 prim::Constant pnnx_16081 0 1 15257 value=-1 prim::Constant pnnx_16082 0 1 15258 value=64 pnnx.Attribute layers_mmsa.1.residual_group.blocks.1 0 1 attn_mask.81 @attn_mask=(36,64,64)f32 #attn_mask.81=(36,64,64)f32 aten::size pnnx_16083 2 1 15246 15249 15265 #15246=(1,2304,192)f32 prim::NumToTensor pnnx_16084 1 1 15265 B.193 aten::Int pnnx_16085 1 1 B.193 15267 aten::Int pnnx_16086 1 1 B.193 15268 aten::size pnnx_16087 2 1 15246 15250 15269 #15246=(1,2304,192)f32 prim::NumToTensor pnnx_16088 1 1 15269 C.327 aten::Int pnnx_16089 1 1 
C.327 15271 aten::Int pnnx_16090 1 1 C.327 15272 aten::Int pnnx_16091 1 1 C.327 15273 aten::Int pnnx_16092 1 1 C.327 15274 nn.LayerNorm layers_mmsa.1.residual_group.blocks.1.norm1 1 1 15246 15275 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #15246=(1,2304,192)f32 #15275=(1,2304,192)f32 prim::ListConstruct pnnx_16093 4 1 15268 1726 1966 15274 15276 prim::Constant pnnx_16095 0 1 25060 value=-4 prim::ListConstruct pnnx_16096 2 1 15251 25060 15278 prim::Constant pnnx_16097 0 1 25061 value=2 prim::ListConstruct pnnx_16098 2 1 15252 25061 15279 Tensor.view Tensor.view_1857 2 1 15275 15276 x.161 $input=15275 $shape=15276 #15275=(1,2304,192)f32 #x.161=(1,48,48,192)f32 prim::Constant pnnx_16100 0 1 25062 value=0 torch.roll torch.roll_2498 3 1 x.161 15278 15279 x0.89 $input=x.161 $shifts=15278 $dims=15279 #x.161=(1,48,48,192)f32 #x0.89=(1,48,48,192)f32 aten::size pnnx_16101 2 1 x0.89 25062 15281 #x0.89=(1,48,48,192)f32 prim::NumToTensor pnnx_16102 1 1 15281 B0.89 aten::Int pnnx_16103 1 1 B0.89 15283 prim::Constant pnnx_16104 0 1 25063 value=1 aten::size pnnx_16105 2 1 x0.89 25063 15284 #x0.89=(1,48,48,192)f32 prim::NumToTensor pnnx_16106 1 1 15284 15285 prim::Constant pnnx_16107 0 1 25064 value=2 aten::size pnnx_16108 2 1 x0.89 25064 15286 #x0.89=(1,48,48,192)f32 prim::NumToTensor pnnx_16109 1 1 15286 15287 aten::size pnnx_16110 2 1 x0.89 15253 15288 #x0.89=(1,48,48,192)f32 prim::NumToTensor pnnx_16111 1 1 15288 C0.89 aten::Int pnnx_16112 1 1 C0.89 15290 aten::Int pnnx_16113 1 1 C0.89 15291 aten::div pnnx_16114 3 1 15285 15248 15247 15292 aten::Int pnnx_16115 1 1 15292 15293 prim::Constant pnnx_16116 0 1 25065 value=8 prim::Constant pnnx_16117 0 1 25066 value=trunc aten::div pnnx_16118 3 1 15287 25065 25066 15294 aten::Int pnnx_16119 1 1 15294 15295 prim::Constant pnnx_16120 0 1 25067 value=8 prim::ListConstruct pnnx_16121 6 1 15283 15293 15254 15295 25067 15291 15296 prim::Constant pnnx_16123 0 1 25068 value=0 prim::Constant pnnx_16124 0 1 25069 value=1 prim::Constant pnnx_16125 0 1 25070 value=3 prim::Constant pnnx_16126 0 1 25071 value=2 prim::ListConstruct pnnx_16127 6 1 25068 25069 25070 25071 15255 15256 15298 Tensor.view Tensor.view_1858 2 1 x0.89 15296 x1.89 $input=x0.89 $shape=15296 #x0.89=(1,48,48,192)f32 #x1.89=(1,6,8,6,8,192)f32 prim::Constant pnnx_16131 0 1 25073 value=8 prim::Constant pnnx_16132 0 1 25074 value=8 prim::ListConstruct pnnx_16133 4 1 15257 25073 25074 15290 15301 torch.permute torch.permute_2844 2 1 x1.89 15298 15299 $input=x1.89 $dims=15298 #x1.89=(1,6,8,6,8,192)f32 #15299=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_239 1 1 15299 15300 memory_format=torch.contiguous_format $input=15299 #15299=(1,6,6,8,8,192)f32 #15300=(1,6,6,8,8,192)f32 prim::Constant pnnx_16135 0 1 25075 value=-1 prim::ListConstruct pnnx_16136 3 1 25075 15258 15273 15303 prim::Constant pnnx_16138 0 1 15305 value=1.767767e-01 prim::Constant pnnx_16139 0 1 15306 value=trunc prim::Constant pnnx_16140 0 1 15307 value=6 prim::Constant pnnx_16141 0 1 15308 value=0 prim::Constant pnnx_16142 0 1 15309 value=1 prim::Constant pnnx_16143 0 1 15310 value=2 prim::Constant pnnx_16144 0 1 15311 value=3 prim::Constant pnnx_16145 0 1 15312 value=6 prim::Constant pnnx_16146 0 1 15313 value=4 prim::Constant pnnx_16147 0 1 15314 value=-2 prim::Constant pnnx_16148 0 1 15315 value=-1 prim::Constant pnnx_16149 0 1 15316 value=64 pnnx.Attribute layers_mmsa.1.residual_group.blocks.1.attn 0 1 relative_position_bias_table.161 @relative_position_bias_table=(225,6)f32 
#relative_position_bias_table.161=(225,6)f32 pnnx.Attribute layers_mmsa.1.residual_group.blocks.1.attn 0 1 relative_position_index.161 @relative_position_index=(64,64)i64 #relative_position_index.161=(64,64)i64 Tensor.view Tensor.view_1859 2 1 15300 15301 x_windows.161 $input=15300 $shape=15301 #15300=(1,6,6,8,8,192)f32 #x_windows.161=(36,8,8,192)f32 Tensor.view Tensor.view_1860 2 1 x_windows.161 15303 x2.89 $input=x_windows.161 $shape=15303 #x_windows.161=(36,8,8,192)f32 #x2.89=(36,64,192)f32 aten::size pnnx_16150 2 1 x2.89 15308 15324 #x2.89=(36,64,192)f32 prim::NumToTensor pnnx_16151 1 1 15324 B_.161 aten::Int pnnx_16152 1 1 B_.161 15326 aten::Int pnnx_16153 1 1 B_.161 15327 aten::size pnnx_16154 2 1 x2.89 15309 15328 #x2.89=(36,64,192)f32 prim::NumToTensor pnnx_16155 1 1 15328 N.161 aten::Int pnnx_16156 1 1 N.161 15330 aten::Int pnnx_16157 1 1 N.161 15331 aten::Int pnnx_16158 1 1 N.161 15332 aten::Int pnnx_16159 1 1 N.161 15333 aten::Int pnnx_16160 1 1 N.161 15334 aten::Int pnnx_16161 1 1 N.161 15335 aten::size pnnx_16162 2 1 x2.89 15310 15336 #x2.89=(36,64,192)f32 prim::NumToTensor pnnx_16163 1 1 15336 C.329 aten::Int pnnx_16164 1 1 C.329 15338 nn.Linear layers_mmsa.1.residual_group.blocks.1.attn.qkv 1 1 x2.89 15339 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.89=(36,64,192)f32 #15339=(36,64,576)f32 aten::div pnnx_16165 3 1 C.329 15307 15306 15340 aten::Int pnnx_16166 1 1 15340 15341 prim::ListConstruct pnnx_16167 5 1 15327 15335 15311 15312 15341 15342 prim::Constant pnnx_16169 0 1 25076 value=2 prim::Constant pnnx_16170 0 1 25077 value=0 prim::Constant pnnx_16171 0 1 25078 value=3 prim::Constant pnnx_16172 0 1 25079 value=1 prim::ListConstruct pnnx_16173 5 1 25076 25077 25078 25079 15313 15344 Tensor.reshape Tensor.reshape_592 2 1 15339 15342 15343 $input=15339 $shape=15342 #15339=(36,64,576)f32 #15343=(36,64,3,6,32)f32 prim::Constant pnnx_16175 0 1 25080 value=0 prim::Constant pnnx_16176 0 1 25081 value=0 prim::Constant pnnx_16178 0 1 25082 value=0 prim::Constant pnnx_16179 0 1 25083 value=1 prim::Constant pnnx_16181 0 1 25084 value=0 prim::Constant pnnx_16182 0 1 25085 value=2 torch.permute torch.permute_2845 2 1 15343 15344 qkv0.89 $input=15343 $dims=15344 #15343=(36,64,3,6,32)f32 #qkv0.89=(3,36,6,64,32)f32 Tensor.select Tensor.select_887 3 1 qkv0.89 25080 25081 q.161 $input=qkv0.89 $dim=25080 $index=25081 #qkv0.89=(3,36,6,64,32)f32 #q.161=(36,6,64,32)f32 aten::mul pnnx_16184 2 1 q.161 15305 q0.89 #q.161=(36,6,64,32)f32 #q0.89=(36,6,64,32)f32 Tensor.select Tensor.select_888 3 1 qkv0.89 25082 25083 k.161 $input=qkv0.89 $dim=25082 $index=25083 #qkv0.89=(3,36,6,64,32)f32 #k.161=(36,6,64,32)f32 prim::Constant pnnx_16187 0 1 25086 value=-1 prim::ListConstruct pnnx_16188 1 1 25086 15352 Tensor.view Tensor.view_1861 2 1 relative_position_index.161 15352 15353 $input=relative_position_index.161 $shape=15352 #relative_position_index.161=(64,64)i64 #15353=(4096)i64 prim::ListConstruct pnnx_16190 1 1 15353 15354 #15353=(4096)i64 prim::Constant pnnx_16192 0 1 25087 value=64 prim::Constant pnnx_16193 0 1 25088 value=-1 prim::ListConstruct pnnx_16194 3 1 15316 25087 25088 15356 Tensor.index Tensor.index_405 2 1 relative_position_bias_table.161 15354 15355 $input=relative_position_bias_table.161 $expr=15354 #relative_position_bias_table.161=(225,6)f32 #15355=(4096,6)f32 prim::Constant pnnx_16196 0 1 25089 value=2 prim::Constant pnnx_16197 0 1 25090 value=0 prim::Constant pnnx_16198 0 1 25091 value=1 prim::ListConstruct pnnx_16199 3 1 25089 25090 25091 15358 
Tensor.view Tensor.view_1862 2 1 15355 15356 relative_position_bias.161 $input=15355 $shape=15356 #15355=(4096,6)f32 #relative_position_bias.161=(64,64,6)f32 prim::Constant pnnx_16203 0 1 25093 value=0 torch.permute torch.permute_2846 2 1 relative_position_bias.161 15358 15359 $input=relative_position_bias.161 $dims=15358 #relative_position_bias.161=(64,64,6)f32 #15359=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_240 1 1 15359 relative_position_bias0.89 memory_format=torch.contiguous_format $input=15359 #15359=(6,64,64)f32 #relative_position_bias0.89=(6,64,64)f32 prim::Constant pnnx_16205 0 1 25094 value=1 torch.transpose torch.transpose_3149 3 1 k.161 15314 15315 15350 $input=k.161 $dim0=15314 $dim1=15315 #k.161=(36,6,64,32)f32 #15350=(36,6,32,64)f32 torch.matmul torch.matmul_2362 2 1 q0.89 15350 attn.323 $input=q0.89 $other=15350 #q0.89=(36,6,64,32)f32 #15350=(36,6,32,64)f32 #attn.323=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3375 2 1 relative_position_bias0.89 25093 15361 $input=relative_position_bias0.89 $dim=25093 #relative_position_bias0.89=(6,64,64)f32 #15361=(1,6,64,64)f32 aten::add pnnx_16206 3 1 attn.323 15361 25094 attn0.45 #attn.323=(36,6,64,64)f32 #15361=(1,6,64,64)f32 #attn0.45=(36,6,64,64)f32 prim::Constant pnnx_16207 0 1 25095 value=0 aten::size pnnx_16208 2 1 attn_mask.81 25095 15363 #attn_mask.81=(36,64,64)f32 prim::NumToTensor pnnx_16209 1 1 15363 other.81 aten::Int pnnx_16210 1 1 other.81 15365 prim::Constant pnnx_16211 0 1 25096 value=trunc aten::div pnnx_16212 3 1 B_.161 other.81 25096 15366 aten::Int pnnx_16213 1 1 15366 15367 prim::Constant pnnx_16214 0 1 25097 value=6 prim::ListConstruct pnnx_16215 5 1 15367 15365 25097 15334 15333 15368 prim::Constant pnnx_16217 0 1 25098 value=1 prim::Constant pnnx_16219 0 1 25099 value=0 prim::Constant pnnx_16221 0 1 25100 value=1 Tensor.view Tensor.view_1863 2 1 attn0.45 15368 15369 $input=attn0.45 $shape=15368 #attn0.45=(36,6,64,64)f32 #15369=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3376 2 1 attn_mask.81 25098 15370 $input=attn_mask.81 $dim=25098 #attn_mask.81=(36,64,64)f32 #15370=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3377 2 1 15370 25099 15371 $input=15370 $dim=25099 #15370=(36,1,64,64)f32 #15371=(1,36,1,64,64)f32 aten::add pnnx_16222 3 1 15369 15371 25100 attn1.45 #15369=(1,36,6,64,64)f32 #15371=(1,36,1,64,64)f32 #attn1.45=(1,36,6,64,64)f32 prim::Constant pnnx_16223 0 1 25101 value=-1 prim::Constant pnnx_16224 0 1 25102 value=6 prim::ListConstruct pnnx_16225 4 1 25101 25102 15332 15331 15373 Tensor.view Tensor.view_1864 2 1 attn1.45 15373 input.363 $input=attn1.45 $shape=15373 #attn1.45=(1,36,6,64,64)f32 #input.363=(36,6,64,64)f32 nn.Softmax layers_mmsa.1.residual_group.blocks.1.attn.softmax 1 1 input.363 15375 dim=-1 #input.363=(36,6,64,64)f32 #15375=(36,6,64,64)f32 nn.Dropout layers_mmsa.1.residual_group.blocks.1.attn.attn_drop 1 1 15375 15376 #15375=(36,6,64,64)f32 #15376=(36,6,64,64)f32 Tensor.select Tensor.select_889 3 1 qkv0.89 25084 25085 v.161 $input=qkv0.89 $dim=25084 $index=25085 #qkv0.89=(3,36,6,64,32)f32 #v.161=(36,6,64,32)f32 prim::Constant pnnx_16228 0 1 25103 value=1 prim::Constant pnnx_16229 0 1 25104 value=2 torch.matmul torch.matmul_2363 2 1 15376 v.161 15377 $input=15376 $other=v.161 #15376=(36,6,64,64)f32 #v.161=(36,6,64,32)f32 #15377=(36,6,64,32)f32 prim::ListConstruct pnnx_16231 3 1 15326 15330 15338 15379 torch.transpose torch.transpose_3150 3 1 15377 25103 25104 15378 $input=15377 $dim0=25103 $dim1=25104 #15377=(36,6,64,32)f32 #15378=(36,64,6,32)f32 Tensor.reshape 
Tensor.reshape_593 2 1 15378 15379 input0.93 $input=15378 $shape=15379 #15378=(36,64,6,32)f32 #input0.93=(36,64,192)f32 nn.Linear layers_mmsa.1.residual_group.blocks.1.attn.proj 1 1 input0.93 15381 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.93=(36,64,192)f32 #15381=(36,64,192)f32 nn.Dropout layers_mmsa.1.residual_group.blocks.1.attn.proj_drop 1 1 15381 15382 #15381=(36,64,192)f32 #15382=(36,64,192)f32 prim::Constant pnnx_16233 0 1 25105 value=-1 prim::Constant pnnx_16234 0 1 25106 value=8 prim::Constant pnnx_16235 0 1 25107 value=8 prim::ListConstruct pnnx_16236 4 1 25105 25106 25107 15272 15383 prim::Constant pnnx_16238 0 1 25108 value=8 prim::Constant pnnx_16239 0 1 25109 value=trunc aten::div pnnx_16240 3 1 H1.1 25108 25109 15385 aten::Int pnnx_16241 1 1 15385 15386 prim::Constant pnnx_16242 0 1 25110 value=8 prim::Constant pnnx_16243 0 1 25111 value=trunc aten::div pnnx_16244 3 1 W1.1 25110 25111 15387 aten::Int pnnx_16245 1 1 15387 15388 prim::Constant pnnx_16246 0 1 25112 value=1 prim::Constant pnnx_16247 0 1 25113 value=8 prim::Constant pnnx_16248 0 1 25114 value=8 prim::Constant pnnx_16249 0 1 25115 value=-1 prim::ListConstruct pnnx_16250 6 1 25112 15386 15388 25113 25114 25115 15389 prim::Constant pnnx_16252 0 1 25116 value=0 prim::Constant pnnx_16253 0 1 25117 value=1 prim::Constant pnnx_16254 0 1 25118 value=3 prim::Constant pnnx_16255 0 1 25119 value=2 prim::Constant pnnx_16256 0 1 25120 value=4 prim::Constant pnnx_16257 0 1 25121 value=5 prim::ListConstruct pnnx_16258 6 1 25116 25117 25118 25119 25120 25121 15391 Tensor.view Tensor.view_1865 2 1 15382 15383 windows.161 $input=15382 $shape=15383 #15382=(36,64,192)f32 #windows.161=(36,8,8,192)f32 Tensor.view Tensor.view_1866 2 1 windows.161 15389 x3.89 $input=windows.161 $shape=15389 #windows.161=(36,8,8,192)f32 #x3.89=(1,6,6,8,8,192)f32 prim::Constant pnnx_16262 0 1 25123 value=1 prim::Constant pnnx_16263 0 1 25124 value=-1 prim::ListConstruct pnnx_16264 4 1 25123 1723 1963 25124 15394 torch.permute torch.permute_2847 2 1 x3.89 15391 15392 $input=x3.89 $dims=15391 #x3.89=(1,6,6,8,8,192)f32 #15392=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_241 1 1 15392 15393 memory_format=torch.contiguous_format $input=15392 #15392=(1,6,8,6,8,192)f32 #15393=(1,6,8,6,8,192)f32 prim::Constant pnnx_16266 0 1 25125 value=4 prim::Constant pnnx_16267 0 1 25126 value=4 prim::ListConstruct pnnx_16268 2 1 25125 25126 15396 prim::Constant pnnx_16269 0 1 25127 value=1 prim::Constant pnnx_16270 0 1 25128 value=2 prim::ListConstruct pnnx_16271 2 1 25127 25128 15397 Tensor.view Tensor.view_1867 2 1 15393 15394 shifted_x.81 $input=15393 $shape=15394 #15393=(1,6,8,6,8,192)f32 #shifted_x.81=(1,48,48,192)f32 aten::mul pnnx_16273 2 1 H1.1 W1.1 15399 aten::Int pnnx_16274 1 1 15399 15400 prim::ListConstruct pnnx_16275 3 1 15267 15400 15271 15401 prim::Constant pnnx_16277 0 1 15403 value=None prim::Constant pnnx_16278 0 1 25129 value=1 torch.roll torch.roll_2499 3 1 shifted_x.81 15396 15397 x4.89 $input=shifted_x.81 $shifts=15396 $dims=15397 #shifted_x.81=(1,48,48,192)f32 #x4.89=(1,48,48,192)f32 Tensor.view Tensor.view_1868 2 1 x4.89 15401 x5.81 $input=x4.89 $shape=15401 #x4.89=(1,48,48,192)f32 #x5.81=(1,2304,192)f32 aten::add pnnx_16279 3 1 15246 x5.81 25129 input.365 #15246=(1,2304,192)f32 #x5.81=(1,2304,192)f32 #input.365=(1,2304,192)f32 nn.LayerNorm layers_mmsa.1.residual_group.blocks.1.norm2 1 1 input.365 15405 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 
#input.365=(1,2304,192)f32 #15405=(1,2304,192)f32 nn.Linear layers_mmsa.1.residual_group.blocks.1.mlp.fc1 1 1 15405 15410 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #15405=(1,2304,192)f32 #15410=(1,2304,384)f32 nn.GELU layers_mmsa.1.residual_group.blocks.1.mlp.act 1 1 15410 15411 #15410=(1,2304,384)f32 #15411=(1,2304,384)f32 nn.Dropout layers_mmsa.1.residual_group.blocks.1.mlp.drop 1 1 15411 15412 #15411=(1,2304,384)f32 #15412=(1,2304,384)f32 nn.Linear layers_mmsa.1.residual_group.blocks.1.mlp.fc2 1 1 15412 15413 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #15412=(1,2304,384)f32 #15413=(1,2304,192)f32 nn.Dropout layers_mmsa.1.residual_group.blocks.1.mlp.drop 1 1 15413 15414 #15413=(1,2304,192)f32 #15414=(1,2304,192)f32 prim::Constant pnnx_16280 0 1 15415 value=None prim::Constant pnnx_16281 0 1 25130 value=1 aten::add pnnx_16282 3 1 input.365 15414 25130 15416 #input.365=(1,2304,192)f32 #15414=(1,2304,192)f32 #15416=(1,2304,192)f32 prim::Constant pnnx_16283 0 1 15417 value=trunc prim::Constant pnnx_16284 0 1 15418 value=8 prim::Constant pnnx_16285 0 1 15419 value=0 prim::Constant pnnx_16286 0 1 15420 value=2 prim::Constant pnnx_16287 0 1 15421 value=1 prim::Constant pnnx_16288 0 1 15422 value=3 prim::Constant pnnx_16289 0 1 15423 value=8 prim::Constant pnnx_16290 0 1 15424 value=4 prim::Constant pnnx_16291 0 1 15425 value=5 prim::Constant pnnx_16292 0 1 15426 value=-1 prim::Constant pnnx_16293 0 1 15427 value=64 aten::size pnnx_16294 2 1 15416 15419 15433 #15416=(1,2304,192)f32 prim::NumToTensor pnnx_16295 1 1 15433 B.195 aten::Int pnnx_16296 1 1 B.195 15435 aten::Int pnnx_16297 1 1 B.195 15436 aten::size pnnx_16298 2 1 15416 15420 15437 #15416=(1,2304,192)f32 prim::NumToTensor pnnx_16299 1 1 15437 C.331 aten::Int pnnx_16300 1 1 C.331 15439 aten::Int pnnx_16301 1 1 C.331 15440 aten::Int pnnx_16302 1 1 C.331 15441 aten::Int pnnx_16303 1 1 C.331 15442 nn.LayerNorm layers_mmsa.1.residual_group.blocks.2.norm1 1 1 15416 15443 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #15416=(1,2304,192)f32 #15443=(1,2304,192)f32 prim::ListConstruct pnnx_16304 4 1 15436 1720 1960 15442 15444 prim::Constant pnnx_16306 0 1 25131 value=0 Tensor.view Tensor.view_1869 2 1 15443 15444 x.163 $input=15443 $shape=15444 #15443=(1,2304,192)f32 #x.163=(1,48,48,192)f32 aten::size pnnx_16307 2 1 x.163 25131 15446 #x.163=(1,48,48,192)f32 prim::NumToTensor pnnx_16308 1 1 15446 B0.91 aten::Int pnnx_16309 1 1 B0.91 15448 aten::size pnnx_16310 2 1 x.163 15421 15449 #x.163=(1,48,48,192)f32 prim::NumToTensor pnnx_16311 1 1 15449 15450 prim::Constant pnnx_16312 0 1 25132 value=2 aten::size pnnx_16313 2 1 x.163 25132 15451 #x.163=(1,48,48,192)f32 prim::NumToTensor pnnx_16314 1 1 15451 15452 aten::size pnnx_16315 2 1 x.163 15422 15453 #x.163=(1,48,48,192)f32 prim::NumToTensor pnnx_16316 1 1 15453 C0.91 aten::Int pnnx_16317 1 1 C0.91 15455 aten::Int pnnx_16318 1 1 C0.91 15456 aten::div pnnx_16319 3 1 15450 15418 15417 15457 aten::Int pnnx_16320 1 1 15457 15458 prim::Constant pnnx_16321 0 1 25133 value=8 prim::Constant pnnx_16322 0 1 25134 value=trunc aten::div pnnx_16323 3 1 15452 25133 25134 15459 aten::Int pnnx_16324 1 1 15459 15460 prim::Constant pnnx_16325 0 1 25135 value=8 prim::ListConstruct pnnx_16326 6 1 15448 15458 15423 15460 25135 15456 15461 prim::Constant pnnx_16328 0 1 25136 value=0 prim::Constant pnnx_16329 0 1 25137 value=1 prim::Constant pnnx_16330 0 1 25138 value=3 prim::Constant pnnx_16331 0 1 
25139 value=2 prim::ListConstruct pnnx_16332 6 1 25136 25137 25138 25139 15424 15425 15463 Tensor.view Tensor.view_1870 2 1 x.163 15461 x0.91 $input=x.163 $shape=15461 #x.163=(1,48,48,192)f32 #x0.91=(1,6,8,6,8,192)f32 prim::Constant pnnx_16336 0 1 25141 value=8 prim::Constant pnnx_16337 0 1 25142 value=8 prim::ListConstruct pnnx_16338 4 1 15426 25141 25142 15455 15466 torch.permute torch.permute_2848 2 1 x0.91 15463 15464 $input=x0.91 $dims=15463 #x0.91=(1,6,8,6,8,192)f32 #15464=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_242 1 1 15464 15465 memory_format=torch.contiguous_format $input=15464 #15464=(1,6,6,8,8,192)f32 #15465=(1,6,6,8,8,192)f32 prim::Constant pnnx_16340 0 1 25143 value=-1 prim::ListConstruct pnnx_16341 3 1 25143 15427 15441 15468 prim::Constant pnnx_16343 0 1 15470 value=1.767767e-01 prim::Constant pnnx_16344 0 1 15471 value=trunc prim::Constant pnnx_16345 0 1 15472 value=6 prim::Constant pnnx_16346 0 1 15473 value=0 prim::Constant pnnx_16347 0 1 15474 value=1 prim::Constant pnnx_16348 0 1 15475 value=2 prim::Constant pnnx_16349 0 1 15476 value=3 prim::Constant pnnx_16350 0 1 15477 value=6 prim::Constant pnnx_16351 0 1 15478 value=4 prim::Constant pnnx_16352 0 1 15479 value=-2 prim::Constant pnnx_16353 0 1 15480 value=-1 prim::Constant pnnx_16354 0 1 15481 value=64 pnnx.Attribute layers_mmsa.1.residual_group.blocks.2.attn 0 1 relative_position_bias_table.163 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.163=(225,6)f32 pnnx.Attribute layers_mmsa.1.residual_group.blocks.2.attn 0 1 relative_position_index.163 @relative_position_index=(64,64)i64 #relative_position_index.163=(64,64)i64 Tensor.view Tensor.view_1871 2 1 15465 15466 x_windows.163 $input=15465 $shape=15466 #15465=(1,6,6,8,8,192)f32 #x_windows.163=(36,8,8,192)f32 Tensor.view Tensor.view_1872 2 1 x_windows.163 15468 x1.91 $input=x_windows.163 $shape=15468 #x_windows.163=(36,8,8,192)f32 #x1.91=(36,64,192)f32 aten::size pnnx_16355 2 1 x1.91 15473 15489 #x1.91=(36,64,192)f32 prim::NumToTensor pnnx_16356 1 1 15489 B_.163 aten::Int pnnx_16357 1 1 B_.163 15491 aten::Int pnnx_16358 1 1 B_.163 15492 aten::size pnnx_16359 2 1 x1.91 15474 15493 #x1.91=(36,64,192)f32 prim::NumToTensor pnnx_16360 1 1 15493 N.163 aten::Int pnnx_16361 1 1 N.163 15495 aten::Int pnnx_16362 1 1 N.163 15496 aten::size pnnx_16363 2 1 x1.91 15475 15497 #x1.91=(36,64,192)f32 prim::NumToTensor pnnx_16364 1 1 15497 C.333 aten::Int pnnx_16365 1 1 C.333 15499 nn.Linear layers_mmsa.1.residual_group.blocks.2.attn.qkv 1 1 x1.91 15500 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.91=(36,64,192)f32 #15500=(36,64,576)f32 aten::div pnnx_16366 3 1 C.333 15472 15471 15501 aten::Int pnnx_16367 1 1 15501 15502 prim::ListConstruct pnnx_16368 5 1 15492 15496 15476 15477 15502 15503 prim::Constant pnnx_16370 0 1 25144 value=2 prim::Constant pnnx_16371 0 1 25145 value=0 prim::Constant pnnx_16372 0 1 25146 value=3 prim::Constant pnnx_16373 0 1 25147 value=1 prim::ListConstruct pnnx_16374 5 1 25144 25145 25146 25147 15478 15505 Tensor.reshape Tensor.reshape_594 2 1 15500 15503 15504 $input=15500 $shape=15503 #15500=(36,64,576)f32 #15504=(36,64,3,6,32)f32 prim::Constant pnnx_16376 0 1 25148 value=0 prim::Constant pnnx_16377 0 1 25149 value=0 prim::Constant pnnx_16379 0 1 25150 value=0 prim::Constant pnnx_16380 0 1 25151 value=1 prim::Constant pnnx_16382 0 1 25152 value=0 prim::Constant pnnx_16383 0 1 25153 value=2 torch.permute torch.permute_2849 2 1 15504 15505 qkv0.91 $input=15504 $dims=15505 
#15504=(36,64,3,6,32)f32 #qkv0.91=(3,36,6,64,32)f32 Tensor.select Tensor.select_890 3 1 qkv0.91 25148 25149 q.163 $input=qkv0.91 $dim=25148 $index=25149 #qkv0.91=(3,36,6,64,32)f32 #q.163=(36,6,64,32)f32 aten::mul pnnx_16385 2 1 q.163 15470 q0.91 #q.163=(36,6,64,32)f32 #q0.91=(36,6,64,32)f32 Tensor.select Tensor.select_891 3 1 qkv0.91 25150 25151 k.163 $input=qkv0.91 $dim=25150 $index=25151 #qkv0.91=(3,36,6,64,32)f32 #k.163=(36,6,64,32)f32 prim::Constant pnnx_16388 0 1 25154 value=-1 prim::ListConstruct pnnx_16389 1 1 25154 15513 Tensor.view Tensor.view_1873 2 1 relative_position_index.163 15513 15514 $input=relative_position_index.163 $shape=15513 #relative_position_index.163=(64,64)i64 #15514=(4096)i64 prim::ListConstruct pnnx_16391 1 1 15514 15515 #15514=(4096)i64 prim::Constant pnnx_16393 0 1 25155 value=64 prim::Constant pnnx_16394 0 1 25156 value=-1 prim::ListConstruct pnnx_16395 3 1 15481 25155 25156 15517 Tensor.index Tensor.index_406 2 1 relative_position_bias_table.163 15515 15516 $input=relative_position_bias_table.163 $expr=15515 #relative_position_bias_table.163=(225,6)f32 #15516=(4096,6)f32 prim::Constant pnnx_16397 0 1 25157 value=2 prim::Constant pnnx_16398 0 1 25158 value=0 prim::Constant pnnx_16399 0 1 25159 value=1 prim::ListConstruct pnnx_16400 3 1 25157 25158 25159 15519 Tensor.view Tensor.view_1874 2 1 15516 15517 relative_position_bias.163 $input=15516 $shape=15517 #15516=(4096,6)f32 #relative_position_bias.163=(64,64,6)f32 prim::Constant pnnx_16404 0 1 25161 value=0 torch.permute torch.permute_2850 2 1 relative_position_bias.163 15519 15520 $input=relative_position_bias.163 $dims=15519 #relative_position_bias.163=(64,64,6)f32 #15520=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_243 1 1 15520 relative_position_bias0.91 memory_format=torch.contiguous_format $input=15520 #15520=(6,64,64)f32 #relative_position_bias0.91=(6,64,64)f32 prim::Constant pnnx_16406 0 1 25162 value=1 torch.transpose torch.transpose_3151 3 1 k.163 15479 15480 15511 $input=k.163 $dim0=15479 $dim1=15480 #k.163=(36,6,64,32)f32 #15511=(36,6,32,64)f32 torch.matmul torch.matmul_2364 2 1 q0.91 15511 attn.327 $input=q0.91 $other=15511 #q0.91=(36,6,64,32)f32 #15511=(36,6,32,64)f32 #attn.327=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3378 2 1 relative_position_bias0.91 25161 15522 $input=relative_position_bias0.91 $dim=25161 #relative_position_bias0.91=(6,64,64)f32 #15522=(1,6,64,64)f32 aten::add pnnx_16407 3 1 attn.327 15522 25162 input.367 #attn.327=(36,6,64,64)f32 #15522=(1,6,64,64)f32 #input.367=(36,6,64,64)f32 nn.Softmax layers_mmsa.1.residual_group.blocks.2.attn.softmax 1 1 input.367 15524 dim=-1 #input.367=(36,6,64,64)f32 #15524=(36,6,64,64)f32 nn.Dropout layers_mmsa.1.residual_group.blocks.2.attn.attn_drop 1 1 15524 15525 #15524=(36,6,64,64)f32 #15525=(36,6,64,64)f32 Tensor.select Tensor.select_892 3 1 qkv0.91 25152 25153 v.163 $input=qkv0.91 $dim=25152 $index=25153 #qkv0.91=(3,36,6,64,32)f32 #v.163=(36,6,64,32)f32 prim::Constant pnnx_16409 0 1 25163 value=1 prim::Constant pnnx_16410 0 1 25164 value=2 torch.matmul torch.matmul_2365 2 1 15525 v.163 15526 $input=15525 $other=v.163 #15525=(36,6,64,64)f32 #v.163=(36,6,64,32)f32 #15526=(36,6,64,32)f32 prim::ListConstruct pnnx_16412 3 1 15491 15495 15499 15528 torch.transpose torch.transpose_3152 3 1 15526 25163 25164 15527 $input=15526 $dim0=25163 $dim1=25164 #15526=(36,6,64,32)f32 #15527=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_595 2 1 15527 15528 input0.95 $input=15527 $shape=15528 #15527=(36,64,6,32)f32 #input0.95=(36,64,192)f32 nn.Linear 
layers_mmsa.1.residual_group.blocks.2.attn.proj 1 1 input0.95 15530 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.95=(36,64,192)f32 #15530=(36,64,192)f32 nn.Dropout layers_mmsa.1.residual_group.blocks.2.attn.proj_drop 1 1 15530 15531 #15530=(36,64,192)f32 #15531=(36,64,192)f32 prim::Constant pnnx_16414 0 1 25165 value=-1 prim::Constant pnnx_16415 0 1 25166 value=8 prim::Constant pnnx_16416 0 1 25167 value=8 prim::ListConstruct pnnx_16417 4 1 25165 25166 25167 15440 15532 prim::Constant pnnx_16419 0 1 25168 value=8 prim::Constant pnnx_16420 0 1 25169 value=trunc aten::div pnnx_16421 3 1 H1.1 25168 25169 15534 aten::Int pnnx_16422 1 1 15534 15535 prim::Constant pnnx_16423 0 1 25170 value=8 prim::Constant pnnx_16424 0 1 25171 value=trunc aten::div pnnx_16425 3 1 W1.1 25170 25171 15536 aten::Int pnnx_16426 1 1 15536 15537 prim::Constant pnnx_16427 0 1 25172 value=1 prim::Constant pnnx_16428 0 1 25173 value=8 prim::Constant pnnx_16429 0 1 25174 value=8 prim::Constant pnnx_16430 0 1 25175 value=-1 prim::ListConstruct pnnx_16431 6 1 25172 15535 15537 25173 25174 25175 15538 prim::Constant pnnx_16433 0 1 25176 value=0 prim::Constant pnnx_16434 0 1 25177 value=1 prim::Constant pnnx_16435 0 1 25178 value=3 prim::Constant pnnx_16436 0 1 25179 value=2 prim::Constant pnnx_16437 0 1 25180 value=4 prim::Constant pnnx_16438 0 1 25181 value=5 prim::ListConstruct pnnx_16439 6 1 25176 25177 25178 25179 25180 25181 15540 Tensor.view Tensor.view_1875 2 1 15531 15532 windows.163 $input=15531 $shape=15532 #15531=(36,64,192)f32 #windows.163=(36,8,8,192)f32 Tensor.view Tensor.view_1876 2 1 windows.163 15538 x2.91 $input=windows.163 $shape=15538 #windows.163=(36,8,8,192)f32 #x2.91=(1,6,6,8,8,192)f32 prim::Constant pnnx_16443 0 1 25183 value=1 prim::Constant pnnx_16444 0 1 25184 value=-1 prim::ListConstruct pnnx_16445 4 1 25183 1717 1957 25184 15543 torch.permute torch.permute_2851 2 1 x2.91 15540 15541 $input=x2.91 $dims=15540 #x2.91=(1,6,6,8,8,192)f32 #15541=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_244 1 1 15541 15542 memory_format=torch.contiguous_format $input=15541 #15541=(1,6,8,6,8,192)f32 #15542=(1,6,8,6,8,192)f32 aten::mul pnnx_16447 2 1 H1.1 W1.1 15545 aten::Int pnnx_16448 1 1 15545 15546 prim::ListConstruct pnnx_16449 3 1 15435 15546 15439 15547 prim::Constant pnnx_16451 0 1 15549 value=None prim::Constant pnnx_16452 0 1 25185 value=1 Tensor.view Tensor.view_1877 2 1 15542 15543 x3.91 $input=15542 $shape=15543 #15542=(1,6,8,6,8,192)f32 #x3.91=(1,48,48,192)f32 Tensor.view Tensor.view_1878 2 1 x3.91 15547 x4.91 $input=x3.91 $shape=15547 #x3.91=(1,48,48,192)f32 #x4.91=(1,2304,192)f32 aten::add pnnx_16453 3 1 15416 x4.91 25185 input.369 #15416=(1,2304,192)f32 #x4.91=(1,2304,192)f32 #input.369=(1,2304,192)f32 nn.LayerNorm layers_mmsa.1.residual_group.blocks.2.norm2 1 1 input.369 15551 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.369=(1,2304,192)f32 #15551=(1,2304,192)f32 nn.Linear layers_mmsa.1.residual_group.blocks.2.mlp.fc1 1 1 15551 15556 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #15551=(1,2304,192)f32 #15556=(1,2304,384)f32 nn.GELU layers_mmsa.1.residual_group.blocks.2.mlp.act 1 1 15556 15557 #15556=(1,2304,384)f32 #15557=(1,2304,384)f32 nn.Dropout layers_mmsa.1.residual_group.blocks.2.mlp.drop 1 1 15557 15558 #15557=(1,2304,384)f32 #15558=(1,2304,384)f32 nn.Linear layers_mmsa.1.residual_group.blocks.2.mlp.fc2 1 1 15558 15559 bias=True in_features=384 
out_features=192 @bias=(192)f32 @weight=(192,384)f32 #15558=(1,2304,384)f32 #15559=(1,2304,192)f32 nn.Dropout layers_mmsa.1.residual_group.blocks.2.mlp.drop 1 1 15559 15560 #15559=(1,2304,192)f32 #15560=(1,2304,192)f32 prim::Constant pnnx_16454 0 1 15561 value=None prim::Constant pnnx_16455 0 1 25186 value=1 aten::add pnnx_16456 3 1 input.369 15560 25186 15562 #input.369=(1,2304,192)f32 #15560=(1,2304,192)f32 #15562=(1,2304,192)f32 prim::Constant pnnx_16457 0 1 15563 value=trunc prim::Constant pnnx_16458 0 1 15564 value=8 prim::Constant pnnx_16459 0 1 15565 value=0 prim::Constant pnnx_16460 0 1 15566 value=2 prim::Constant pnnx_16461 0 1 15567 value=-4 prim::Constant pnnx_16462 0 1 15568 value=1 prim::Constant pnnx_16463 0 1 15569 value=3 prim::Constant pnnx_16464 0 1 15570 value=8 prim::Constant pnnx_16465 0 1 15571 value=4 prim::Constant pnnx_16466 0 1 15572 value=5 prim::Constant pnnx_16467 0 1 15573 value=-1 prim::Constant pnnx_16468 0 1 15574 value=64 pnnx.Attribute layers_mmsa.1.residual_group.blocks.3 0 1 attn_mask.83 @attn_mask=(36,64,64)f32 #attn_mask.83=(36,64,64)f32 aten::size pnnx_16469 2 1 15562 15565 15581 #15562=(1,2304,192)f32 prim::NumToTensor pnnx_16470 1 1 15581 B.197 aten::Int pnnx_16471 1 1 B.197 15583 aten::Int pnnx_16472 1 1 B.197 15584 aten::size pnnx_16473 2 1 15562 15566 15585 #15562=(1,2304,192)f32 prim::NumToTensor pnnx_16474 1 1 15585 C.335 aten::Int pnnx_16475 1 1 C.335 15587 aten::Int pnnx_16476 1 1 C.335 15588 aten::Int pnnx_16477 1 1 C.335 15589 aten::Int pnnx_16478 1 1 C.335 15590 nn.LayerNorm layers_mmsa.1.residual_group.blocks.3.norm1 1 1 15562 15591 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #15562=(1,2304,192)f32 #15591=(1,2304,192)f32 prim::ListConstruct pnnx_16479 4 1 15584 1714 1954 15590 15592 prim::Constant pnnx_16481 0 1 25187 value=-4 prim::ListConstruct pnnx_16482 2 1 15567 25187 15594 prim::Constant pnnx_16483 0 1 25188 value=2 prim::ListConstruct pnnx_16484 2 1 15568 25188 15595 Tensor.view Tensor.view_1879 2 1 15591 15592 x.165 $input=15591 $shape=15592 #15591=(1,2304,192)f32 #x.165=(1,48,48,192)f32 prim::Constant pnnx_16486 0 1 25189 value=0 torch.roll torch.roll_2500 3 1 x.165 15594 15595 x0.93 $input=x.165 $shifts=15594 $dims=15595 #x.165=(1,48,48,192)f32 #x0.93=(1,48,48,192)f32 aten::size pnnx_16487 2 1 x0.93 25189 15597 #x0.93=(1,48,48,192)f32 prim::NumToTensor pnnx_16488 1 1 15597 B0.93 aten::Int pnnx_16489 1 1 B0.93 15599 prim::Constant pnnx_16490 0 1 25190 value=1 aten::size pnnx_16491 2 1 x0.93 25190 15600 #x0.93=(1,48,48,192)f32 prim::NumToTensor pnnx_16492 1 1 15600 15601 prim::Constant pnnx_16493 0 1 25191 value=2 aten::size pnnx_16494 2 1 x0.93 25191 15602 #x0.93=(1,48,48,192)f32 prim::NumToTensor pnnx_16495 1 1 15602 15603 aten::size pnnx_16496 2 1 x0.93 15569 15604 #x0.93=(1,48,48,192)f32 prim::NumToTensor pnnx_16497 1 1 15604 C0.93 aten::Int pnnx_16498 1 1 C0.93 15606 aten::Int pnnx_16499 1 1 C0.93 15607 aten::div pnnx_16500 3 1 15601 15564 15563 15608 aten::Int pnnx_16501 1 1 15608 15609 prim::Constant pnnx_16502 0 1 25192 value=8 prim::Constant pnnx_16503 0 1 25193 value=trunc aten::div pnnx_16504 3 1 15603 25192 25193 15610 aten::Int pnnx_16505 1 1 15610 15611 prim::Constant pnnx_16506 0 1 25194 value=8 prim::ListConstruct pnnx_16507 6 1 15599 15609 15570 15611 25194 15607 15612 prim::Constant pnnx_16509 0 1 25195 value=0 prim::Constant pnnx_16510 0 1 25196 value=1 prim::Constant pnnx_16511 0 1 25197 value=3 prim::Constant pnnx_16512 0 1 25198 value=2 prim::ListConstruct 
pnnx_16513 6 1 25195 25196 25197 25198 15571 15572 15614 Tensor.view Tensor.view_1880 2 1 x0.93 15612 x1.93 $input=x0.93 $shape=15612 #x0.93=(1,48,48,192)f32 #x1.93=(1,6,8,6,8,192)f32 prim::Constant pnnx_16517 0 1 25200 value=8 prim::Constant pnnx_16518 0 1 25201 value=8 prim::ListConstruct pnnx_16519 4 1 15573 25200 25201 15606 15617 torch.permute torch.permute_2852 2 1 x1.93 15614 15615 $input=x1.93 $dims=15614 #x1.93=(1,6,8,6,8,192)f32 #15615=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_245 1 1 15615 15616 memory_format=torch.contiguous_format $input=15615 #15615=(1,6,6,8,8,192)f32 #15616=(1,6,6,8,8,192)f32 prim::Constant pnnx_16521 0 1 25202 value=-1 prim::ListConstruct pnnx_16522 3 1 25202 15574 15589 15619 prim::Constant pnnx_16524 0 1 15621 value=1.767767e-01 prim::Constant pnnx_16525 0 1 15622 value=trunc prim::Constant pnnx_16526 0 1 15623 value=6 prim::Constant pnnx_16527 0 1 15624 value=0 prim::Constant pnnx_16528 0 1 15625 value=1 prim::Constant pnnx_16529 0 1 15626 value=2 prim::Constant pnnx_16530 0 1 15627 value=3 prim::Constant pnnx_16531 0 1 15628 value=6 prim::Constant pnnx_16532 0 1 15629 value=4 prim::Constant pnnx_16533 0 1 15630 value=-2 prim::Constant pnnx_16534 0 1 15631 value=-1 prim::Constant pnnx_16535 0 1 15632 value=64 pnnx.Attribute layers_mmsa.1.residual_group.blocks.3.attn 0 1 relative_position_bias_table.165 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.165=(225,6)f32 pnnx.Attribute layers_mmsa.1.residual_group.blocks.3.attn 0 1 relative_position_index.165 @relative_position_index=(64,64)i64 #relative_position_index.165=(64,64)i64 Tensor.view Tensor.view_1881 2 1 15616 15617 x_windows.165 $input=15616 $shape=15617 #15616=(1,6,6,8,8,192)f32 #x_windows.165=(36,8,8,192)f32 Tensor.view Tensor.view_1882 2 1 x_windows.165 15619 x2.93 $input=x_windows.165 $shape=15619 #x_windows.165=(36,8,8,192)f32 #x2.93=(36,64,192)f32 aten::size pnnx_16536 2 1 x2.93 15624 15640 #x2.93=(36,64,192)f32 prim::NumToTensor pnnx_16537 1 1 15640 B_.165 aten::Int pnnx_16538 1 1 B_.165 15642 aten::Int pnnx_16539 1 1 B_.165 15643 aten::size pnnx_16540 2 1 x2.93 15625 15644 #x2.93=(36,64,192)f32 prim::NumToTensor pnnx_16541 1 1 15644 N.165 aten::Int pnnx_16542 1 1 N.165 15646 aten::Int pnnx_16543 1 1 N.165 15647 aten::Int pnnx_16544 1 1 N.165 15648 aten::Int pnnx_16545 1 1 N.165 15649 aten::Int pnnx_16546 1 1 N.165 15650 aten::Int pnnx_16547 1 1 N.165 15651 aten::size pnnx_16548 2 1 x2.93 15626 15652 #x2.93=(36,64,192)f32 prim::NumToTensor pnnx_16549 1 1 15652 C.337 aten::Int pnnx_16550 1 1 C.337 15654 nn.Linear layers_mmsa.1.residual_group.blocks.3.attn.qkv 1 1 x2.93 15655 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.93=(36,64,192)f32 #15655=(36,64,576)f32 aten::div pnnx_16551 3 1 C.337 15623 15622 15656 aten::Int pnnx_16552 1 1 15656 15657 prim::ListConstruct pnnx_16553 5 1 15643 15651 15627 15628 15657 15658 prim::Constant pnnx_16555 0 1 25203 value=2 prim::Constant pnnx_16556 0 1 25204 value=0 prim::Constant pnnx_16557 0 1 25205 value=3 prim::Constant pnnx_16558 0 1 25206 value=1 prim::ListConstruct pnnx_16559 5 1 25203 25204 25205 25206 15629 15660 Tensor.reshape Tensor.reshape_596 2 1 15655 15658 15659 $input=15655 $shape=15658 #15655=(36,64,576)f32 #15659=(36,64,3,6,32)f32 prim::Constant pnnx_16561 0 1 25207 value=0 prim::Constant pnnx_16562 0 1 25208 value=0 prim::Constant pnnx_16564 0 1 25209 value=0 prim::Constant pnnx_16565 0 1 25210 value=1 prim::Constant pnnx_16567 0 1 25211 value=0 prim::Constant 
pnnx_16568 0 1 25212 value=2 torch.permute torch.permute_2853 2 1 15659 15660 qkv0.93 $input=15659 $dims=15660 #15659=(36,64,3,6,32)f32 #qkv0.93=(3,36,6,64,32)f32 Tensor.select Tensor.select_893 3 1 qkv0.93 25207 25208 q.165 $input=qkv0.93 $dim=25207 $index=25208 #qkv0.93=(3,36,6,64,32)f32 #q.165=(36,6,64,32)f32 aten::mul pnnx_16570 2 1 q.165 15621 q0.93 #q.165=(36,6,64,32)f32 #q0.93=(36,6,64,32)f32 Tensor.select Tensor.select_894 3 1 qkv0.93 25209 25210 k.165 $input=qkv0.93 $dim=25209 $index=25210 #qkv0.93=(3,36,6,64,32)f32 #k.165=(36,6,64,32)f32 prim::Constant pnnx_16573 0 1 25213 value=-1 prim::ListConstruct pnnx_16574 1 1 25213 15668 Tensor.view Tensor.view_1883 2 1 relative_position_index.165 15668 15669 $input=relative_position_index.165 $shape=15668 #relative_position_index.165=(64,64)i64 #15669=(4096)i64 prim::ListConstruct pnnx_16576 1 1 15669 15670 #15669=(4096)i64 prim::Constant pnnx_16578 0 1 25214 value=64 prim::Constant pnnx_16579 0 1 25215 value=-1 prim::ListConstruct pnnx_16580 3 1 15632 25214 25215 15672 Tensor.index Tensor.index_407 2 1 relative_position_bias_table.165 15670 15671 $input=relative_position_bias_table.165 $expr=15670 #relative_position_bias_table.165=(225,6)f32 #15671=(4096,6)f32 prim::Constant pnnx_16582 0 1 25216 value=2 prim::Constant pnnx_16583 0 1 25217 value=0 prim::Constant pnnx_16584 0 1 25218 value=1 prim::ListConstruct pnnx_16585 3 1 25216 25217 25218 15674 Tensor.view Tensor.view_1884 2 1 15671 15672 relative_position_bias.165 $input=15671 $shape=15672 #15671=(4096,6)f32 #relative_position_bias.165=(64,64,6)f32 prim::Constant pnnx_16589 0 1 25220 value=0 torch.permute torch.permute_2854 2 1 relative_position_bias.165 15674 15675 $input=relative_position_bias.165 $dims=15674 #relative_position_bias.165=(64,64,6)f32 #15675=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_246 1 1 15675 relative_position_bias0.93 memory_format=torch.contiguous_format $input=15675 #15675=(6,64,64)f32 #relative_position_bias0.93=(6,64,64)f32 prim::Constant pnnx_16591 0 1 25221 value=1 torch.transpose torch.transpose_3153 3 1 k.165 15630 15631 15666 $input=k.165 $dim0=15630 $dim1=15631 #k.165=(36,6,64,32)f32 #15666=(36,6,32,64)f32 torch.matmul torch.matmul_2366 2 1 q0.93 15666 attn.331 $input=q0.93 $other=15666 #q0.93=(36,6,64,32)f32 #15666=(36,6,32,64)f32 #attn.331=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3379 2 1 relative_position_bias0.93 25220 15677 $input=relative_position_bias0.93 $dim=25220 #relative_position_bias0.93=(6,64,64)f32 #15677=(1,6,64,64)f32 aten::add pnnx_16592 3 1 attn.331 15677 25221 attn0.47 #attn.331=(36,6,64,64)f32 #15677=(1,6,64,64)f32 #attn0.47=(36,6,64,64)f32 prim::Constant pnnx_16593 0 1 25222 value=0 aten::size pnnx_16594 2 1 attn_mask.83 25222 15679 #attn_mask.83=(36,64,64)f32 prim::NumToTensor pnnx_16595 1 1 15679 other.83 aten::Int pnnx_16596 1 1 other.83 15681 prim::Constant pnnx_16597 0 1 25223 value=trunc aten::div pnnx_16598 3 1 B_.165 other.83 25223 15682 aten::Int pnnx_16599 1 1 15682 15683 prim::Constant pnnx_16600 0 1 25224 value=6 prim::ListConstruct pnnx_16601 5 1 15683 15681 25224 15650 15649 15684 prim::Constant pnnx_16603 0 1 25225 value=1 prim::Constant pnnx_16605 0 1 25226 value=0 prim::Constant pnnx_16607 0 1 25227 value=1 Tensor.view Tensor.view_1885 2 1 attn0.47 15684 15685 $input=attn0.47 $shape=15684 #attn0.47=(36,6,64,64)f32 #15685=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3380 2 1 attn_mask.83 25225 15686 $input=attn_mask.83 $dim=25225 #attn_mask.83=(36,64,64)f32 #15686=(36,1,64,64)f32 torch.unsqueeze 
torch.unsqueeze_3381 2 1 15686 25226 15687 $input=15686 $dim=25226 #15686=(36,1,64,64)f32 #15687=(1,36,1,64,64)f32 aten::add pnnx_16608 3 1 15685 15687 25227 attn1.47 #15685=(1,36,6,64,64)f32 #15687=(1,36,1,64,64)f32 #attn1.47=(1,36,6,64,64)f32 prim::Constant pnnx_16609 0 1 25228 value=-1 prim::Constant pnnx_16610 0 1 25229 value=6 prim::ListConstruct pnnx_16611 4 1 25228 25229 15648 15647 15689 Tensor.view Tensor.view_1886 2 1 attn1.47 15689 input.371 $input=attn1.47 $shape=15689 #attn1.47=(1,36,6,64,64)f32 #input.371=(36,6,64,64)f32 nn.Softmax layers_mmsa.1.residual_group.blocks.3.attn.softmax 1 1 input.371 15691 dim=-1 #input.371=(36,6,64,64)f32 #15691=(36,6,64,64)f32 nn.Dropout layers_mmsa.1.residual_group.blocks.3.attn.attn_drop 1 1 15691 15692 #15691=(36,6,64,64)f32 #15692=(36,6,64,64)f32 Tensor.select Tensor.select_895 3 1 qkv0.93 25211 25212 v.165 $input=qkv0.93 $dim=25211 $index=25212 #qkv0.93=(3,36,6,64,32)f32 #v.165=(36,6,64,32)f32 prim::Constant pnnx_16614 0 1 25230 value=1 prim::Constant pnnx_16615 0 1 25231 value=2 torch.matmul torch.matmul_2367 2 1 15692 v.165 15693 $input=15692 $other=v.165 #15692=(36,6,64,64)f32 #v.165=(36,6,64,32)f32 #15693=(36,6,64,32)f32 prim::ListConstruct pnnx_16617 3 1 15642 15646 15654 15695 torch.transpose torch.transpose_3154 3 1 15693 25230 25231 15694 $input=15693 $dim0=25230 $dim1=25231 #15693=(36,6,64,32)f32 #15694=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_597 2 1 15694 15695 input0.97 $input=15694 $shape=15695 #15694=(36,64,6,32)f32 #input0.97=(36,64,192)f32 nn.Linear layers_mmsa.1.residual_group.blocks.3.attn.proj 1 1 input0.97 15697 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.97=(36,64,192)f32 #15697=(36,64,192)f32 nn.Dropout layers_mmsa.1.residual_group.blocks.3.attn.proj_drop 1 1 15697 15698 #15697=(36,64,192)f32 #15698=(36,64,192)f32 prim::Constant pnnx_16619 0 1 25232 value=-1 prim::Constant pnnx_16620 0 1 25233 value=8 prim::Constant pnnx_16621 0 1 25234 value=8 prim::ListConstruct pnnx_16622 4 1 25232 25233 25234 15588 15699 prim::Constant pnnx_16624 0 1 25235 value=8 prim::Constant pnnx_16625 0 1 25236 value=trunc aten::div pnnx_16626 3 1 H1.1 25235 25236 15701 aten::Int pnnx_16627 1 1 15701 15702 prim::Constant pnnx_16628 0 1 25237 value=8 prim::Constant pnnx_16629 0 1 25238 value=trunc aten::div pnnx_16630 3 1 W1.1 25237 25238 15703 aten::Int pnnx_16631 1 1 15703 15704 prim::Constant pnnx_16632 0 1 25239 value=1 prim::Constant pnnx_16633 0 1 25240 value=8 prim::Constant pnnx_16634 0 1 25241 value=8 prim::Constant pnnx_16635 0 1 25242 value=-1 prim::ListConstruct pnnx_16636 6 1 25239 15702 15704 25240 25241 25242 15705 prim::Constant pnnx_16638 0 1 25243 value=0 prim::Constant pnnx_16639 0 1 25244 value=1 prim::Constant pnnx_16640 0 1 25245 value=3 prim::Constant pnnx_16641 0 1 25246 value=2 prim::Constant pnnx_16642 0 1 25247 value=4 prim::Constant pnnx_16643 0 1 25248 value=5 prim::ListConstruct pnnx_16644 6 1 25243 25244 25245 25246 25247 25248 15707 Tensor.view Tensor.view_1887 2 1 15698 15699 windows.165 $input=15698 $shape=15699 #15698=(36,64,192)f32 #windows.165=(36,8,8,192)f32 Tensor.view Tensor.view_1888 2 1 windows.165 15705 x3.93 $input=windows.165 $shape=15705 #windows.165=(36,8,8,192)f32 #x3.93=(1,6,6,8,8,192)f32 prim::Constant pnnx_16648 0 1 25250 value=1 prim::Constant pnnx_16649 0 1 25251 value=-1 prim::ListConstruct pnnx_16650 4 1 25250 1711 1951 25251 15710 torch.permute torch.permute_2855 2 1 x3.93 15707 15708 $input=x3.93 $dims=15707 #x3.93=(1,6,6,8,8,192)f32 
#15708=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_247 1 1 15708 15709 memory_format=torch.contiguous_format $input=15708 #15708=(1,6,8,6,8,192)f32 #15709=(1,6,8,6,8,192)f32 prim::Constant pnnx_16652 0 1 25252 value=4 prim::Constant pnnx_16653 0 1 25253 value=4 prim::ListConstruct pnnx_16654 2 1 25252 25253 15712 prim::Constant pnnx_16655 0 1 25254 value=1 prim::Constant pnnx_16656 0 1 25255 value=2 prim::ListConstruct pnnx_16657 2 1 25254 25255 15713 Tensor.view Tensor.view_1889 2 1 15709 15710 shifted_x.83 $input=15709 $shape=15710 #15709=(1,6,8,6,8,192)f32 #shifted_x.83=(1,48,48,192)f32 aten::mul pnnx_16659 2 1 H1.1 W1.1 15715 aten::Int pnnx_16660 1 1 15715 15716 prim::ListConstruct pnnx_16661 3 1 15583 15716 15587 15717 prim::Constant pnnx_16663 0 1 15719 value=None prim::Constant pnnx_16664 0 1 25256 value=1 torch.roll torch.roll_2501 3 1 shifted_x.83 15712 15713 x4.93 $input=shifted_x.83 $shifts=15712 $dims=15713 #shifted_x.83=(1,48,48,192)f32 #x4.93=(1,48,48,192)f32 Tensor.view Tensor.view_1890 2 1 x4.93 15717 x5.83 $input=x4.93 $shape=15717 #x4.93=(1,48,48,192)f32 #x5.83=(1,2304,192)f32 aten::add pnnx_16665 3 1 15562 x5.83 25256 input.373 #15562=(1,2304,192)f32 #x5.83=(1,2304,192)f32 #input.373=(1,2304,192)f32 nn.LayerNorm layers_mmsa.1.residual_group.blocks.3.norm2 1 1 input.373 15721 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.373=(1,2304,192)f32 #15721=(1,2304,192)f32 nn.Linear layers_mmsa.1.residual_group.blocks.3.mlp.fc1 1 1 15721 15726 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #15721=(1,2304,192)f32 #15726=(1,2304,384)f32 nn.GELU layers_mmsa.1.residual_group.blocks.3.mlp.act 1 1 15726 15727 #15726=(1,2304,384)f32 #15727=(1,2304,384)f32 nn.Dropout layers_mmsa.1.residual_group.blocks.3.mlp.drop 1 1 15727 15728 #15727=(1,2304,384)f32 #15728=(1,2304,384)f32 nn.Linear layers_mmsa.1.residual_group.blocks.3.mlp.fc2 1 1 15728 15729 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #15728=(1,2304,384)f32 #15729=(1,2304,192)f32 nn.Dropout layers_mmsa.1.residual_group.blocks.3.mlp.drop 1 1 15729 15730 #15729=(1,2304,192)f32 #15730=(1,2304,192)f32 prim::Constant pnnx_16666 0 1 15731 value=None prim::Constant pnnx_16667 0 1 25257 value=1 aten::add pnnx_16668 3 1 input.373 15730 25257 15732 #input.373=(1,2304,192)f32 #15730=(1,2304,192)f32 #15732=(1,2304,192)f32 prim::Constant pnnx_16669 0 1 15733 value=trunc prim::Constant pnnx_16670 0 1 15734 value=8 prim::Constant pnnx_16671 0 1 15735 value=0 prim::Constant pnnx_16672 0 1 15736 value=2 prim::Constant pnnx_16673 0 1 15737 value=1 prim::Constant pnnx_16674 0 1 15738 value=3 prim::Constant pnnx_16675 0 1 15739 value=8 prim::Constant pnnx_16676 0 1 15740 value=4 prim::Constant pnnx_16677 0 1 15741 value=5 prim::Constant pnnx_16678 0 1 15742 value=-1 prim::Constant pnnx_16679 0 1 15743 value=64 aten::size pnnx_16680 2 1 15732 15735 15749 #15732=(1,2304,192)f32 prim::NumToTensor pnnx_16681 1 1 15749 B.199 aten::Int pnnx_16682 1 1 B.199 15751 aten::Int pnnx_16683 1 1 B.199 15752 aten::size pnnx_16684 2 1 15732 15736 15753 #15732=(1,2304,192)f32 prim::NumToTensor pnnx_16685 1 1 15753 C.339 aten::Int pnnx_16686 1 1 C.339 15755 aten::Int pnnx_16687 1 1 C.339 15756 aten::Int pnnx_16688 1 1 C.339 15757 aten::Int pnnx_16689 1 1 C.339 15758 nn.LayerNorm layers_mmsa.1.residual_group.blocks.4.norm1 1 1 15732 15759 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 
#15732=(1,2304,192)f32 #15759=(1,2304,192)f32 prim::ListConstruct pnnx_16690 4 1 15752 1708 1948 15758 15760 prim::Constant pnnx_16692 0 1 25258 value=0 Tensor.view Tensor.view_1891 2 1 15759 15760 x.167 $input=15759 $shape=15760 #15759=(1,2304,192)f32 #x.167=(1,48,48,192)f32 aten::size pnnx_16693 2 1 x.167 25258 15762 #x.167=(1,48,48,192)f32 prim::NumToTensor pnnx_16694 1 1 15762 B0.95 aten::Int pnnx_16695 1 1 B0.95 15764 aten::size pnnx_16696 2 1 x.167 15737 15765 #x.167=(1,48,48,192)f32 prim::NumToTensor pnnx_16697 1 1 15765 15766 prim::Constant pnnx_16698 0 1 25259 value=2 aten::size pnnx_16699 2 1 x.167 25259 15767 #x.167=(1,48,48,192)f32 prim::NumToTensor pnnx_16700 1 1 15767 15768 aten::size pnnx_16701 2 1 x.167 15738 15769 #x.167=(1,48,48,192)f32 prim::NumToTensor pnnx_16702 1 1 15769 C0.95 aten::Int pnnx_16703 1 1 C0.95 15771 aten::Int pnnx_16704 1 1 C0.95 15772 aten::div pnnx_16705 3 1 15766 15734 15733 15773 aten::Int pnnx_16706 1 1 15773 15774 prim::Constant pnnx_16707 0 1 25260 value=8 prim::Constant pnnx_16708 0 1 25261 value=trunc aten::div pnnx_16709 3 1 15768 25260 25261 15775 aten::Int pnnx_16710 1 1 15775 15776 prim::Constant pnnx_16711 0 1 25262 value=8 prim::ListConstruct pnnx_16712 6 1 15764 15774 15739 15776 25262 15772 15777 prim::Constant pnnx_16714 0 1 25263 value=0 prim::Constant pnnx_16715 0 1 25264 value=1 prim::Constant pnnx_16716 0 1 25265 value=3 prim::Constant pnnx_16717 0 1 25266 value=2 prim::ListConstruct pnnx_16718 6 1 25263 25264 25265 25266 15740 15741 15779 Tensor.view Tensor.view_1892 2 1 x.167 15777 x0.95 $input=x.167 $shape=15777 #x.167=(1,48,48,192)f32 #x0.95=(1,6,8,6,8,192)f32 prim::Constant pnnx_16722 0 1 25268 value=8 prim::Constant pnnx_16723 0 1 25269 value=8 prim::ListConstruct pnnx_16724 4 1 15742 25268 25269 15771 15782 torch.permute torch.permute_2856 2 1 x0.95 15779 15780 $input=x0.95 $dims=15779 #x0.95=(1,6,8,6,8,192)f32 #15780=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_248 1 1 15780 15781 memory_format=torch.contiguous_format $input=15780 #15780=(1,6,6,8,8,192)f32 #15781=(1,6,6,8,8,192)f32 prim::Constant pnnx_16726 0 1 25270 value=-1 prim::ListConstruct pnnx_16727 3 1 25270 15743 15757 15784 prim::Constant pnnx_16729 0 1 15786 value=1.767767e-01 prim::Constant pnnx_16730 0 1 15787 value=trunc prim::Constant pnnx_16731 0 1 15788 value=6 prim::Constant pnnx_16732 0 1 15789 value=0 prim::Constant pnnx_16733 0 1 15790 value=1 prim::Constant pnnx_16734 0 1 15791 value=2 prim::Constant pnnx_16735 0 1 15792 value=3 prim::Constant pnnx_16736 0 1 15793 value=6 prim::Constant pnnx_16737 0 1 15794 value=4 prim::Constant pnnx_16738 0 1 15795 value=-2 prim::Constant pnnx_16739 0 1 15796 value=-1 prim::Constant pnnx_16740 0 1 15797 value=64 pnnx.Attribute layers_mmsa.1.residual_group.blocks.4.attn 0 1 relative_position_bias_table.167 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.167=(225,6)f32 pnnx.Attribute layers_mmsa.1.residual_group.blocks.4.attn 0 1 relative_position_index.167 @relative_position_index=(64,64)i64 #relative_position_index.167=(64,64)i64 Tensor.view Tensor.view_1893 2 1 15781 15782 x_windows.167 $input=15781 $shape=15782 #15781=(1,6,6,8,8,192)f32 #x_windows.167=(36,8,8,192)f32 Tensor.view Tensor.view_1894 2 1 x_windows.167 15784 x1.95 $input=x_windows.167 $shape=15784 #x_windows.167=(36,8,8,192)f32 #x1.95=(36,64,192)f32 aten::size pnnx_16741 2 1 x1.95 15789 15805 #x1.95=(36,64,192)f32 prim::NumToTensor pnnx_16742 1 1 15805 B_.167 aten::Int pnnx_16743 1 1 B_.167 15807 aten::Int pnnx_16744 1 1 
B_.167 15808 aten::size pnnx_16745 2 1 x1.95 15790 15809 #x1.95=(36,64,192)f32 prim::NumToTensor pnnx_16746 1 1 15809 N.167 aten::Int pnnx_16747 1 1 N.167 15811 aten::Int pnnx_16748 1 1 N.167 15812 aten::size pnnx_16749 2 1 x1.95 15791 15813 #x1.95=(36,64,192)f32 prim::NumToTensor pnnx_16750 1 1 15813 C.341 aten::Int pnnx_16751 1 1 C.341 15815 nn.Linear layers_mmsa.1.residual_group.blocks.4.attn.qkv 1 1 x1.95 15816 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.95=(36,64,192)f32 #15816=(36,64,576)f32 aten::div pnnx_16752 3 1 C.341 15788 15787 15817 aten::Int pnnx_16753 1 1 15817 15818 prim::ListConstruct pnnx_16754 5 1 15808 15812 15792 15793 15818 15819 prim::Constant pnnx_16756 0 1 25271 value=2 prim::Constant pnnx_16757 0 1 25272 value=0 prim::Constant pnnx_16758 0 1 25273 value=3 prim::Constant pnnx_16759 0 1 25274 value=1 prim::ListConstruct pnnx_16760 5 1 25271 25272 25273 25274 15794 15821 Tensor.reshape Tensor.reshape_598 2 1 15816 15819 15820 $input=15816 $shape=15819 #15816=(36,64,576)f32 #15820=(36,64,3,6,32)f32 prim::Constant pnnx_16762 0 1 25275 value=0 prim::Constant pnnx_16763 0 1 25276 value=0 prim::Constant pnnx_16765 0 1 25277 value=0 prim::Constant pnnx_16766 0 1 25278 value=1 prim::Constant pnnx_16768 0 1 25279 value=0 prim::Constant pnnx_16769 0 1 25280 value=2 torch.permute torch.permute_2857 2 1 15820 15821 qkv0.95 $input=15820 $dims=15821 #15820=(36,64,3,6,32)f32 #qkv0.95=(3,36,6,64,32)f32 Tensor.select Tensor.select_896 3 1 qkv0.95 25275 25276 q.167 $input=qkv0.95 $dim=25275 $index=25276 #qkv0.95=(3,36,6,64,32)f32 #q.167=(36,6,64,32)f32 aten::mul pnnx_16771 2 1 q.167 15786 q0.95 #q.167=(36,6,64,32)f32 #q0.95=(36,6,64,32)f32 Tensor.select Tensor.select_897 3 1 qkv0.95 25277 25278 k.167 $input=qkv0.95 $dim=25277 $index=25278 #qkv0.95=(3,36,6,64,32)f32 #k.167=(36,6,64,32)f32 prim::Constant pnnx_16774 0 1 25281 value=-1 prim::ListConstruct pnnx_16775 1 1 25281 15829 Tensor.view Tensor.view_1895 2 1 relative_position_index.167 15829 15830 $input=relative_position_index.167 $shape=15829 #relative_position_index.167=(64,64)i64 #15830=(4096)i64 prim::ListConstruct pnnx_16777 1 1 15830 15831 #15830=(4096)i64 prim::Constant pnnx_16779 0 1 25282 value=64 prim::Constant pnnx_16780 0 1 25283 value=-1 prim::ListConstruct pnnx_16781 3 1 15797 25282 25283 15833 Tensor.index Tensor.index_408 2 1 relative_position_bias_table.167 15831 15832 $input=relative_position_bias_table.167 $expr=15831 #relative_position_bias_table.167=(225,6)f32 #15832=(4096,6)f32 prim::Constant pnnx_16783 0 1 25284 value=2 prim::Constant pnnx_16784 0 1 25285 value=0 prim::Constant pnnx_16785 0 1 25286 value=1 prim::ListConstruct pnnx_16786 3 1 25284 25285 25286 15835 Tensor.view Tensor.view_1896 2 1 15832 15833 relative_position_bias.167 $input=15832 $shape=15833 #15832=(4096,6)f32 #relative_position_bias.167=(64,64,6)f32 prim::Constant pnnx_16790 0 1 25288 value=0 torch.permute torch.permute_2858 2 1 relative_position_bias.167 15835 15836 $input=relative_position_bias.167 $dims=15835 #relative_position_bias.167=(64,64,6)f32 #15836=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_249 1 1 15836 relative_position_bias0.95 memory_format=torch.contiguous_format $input=15836 #15836=(6,64,64)f32 #relative_position_bias0.95=(6,64,64)f32 prim::Constant pnnx_16792 0 1 25289 value=1 torch.transpose torch.transpose_3155 3 1 k.167 15795 15796 15827 $input=k.167 $dim0=15795 $dim1=15796 #k.167=(36,6,64,32)f32 #15827=(36,6,32,64)f32 torch.matmul torch.matmul_2368 2 1 q0.95 15827 attn.335 
$input=q0.95 $other=15827 #q0.95=(36,6,64,32)f32 #15827=(36,6,32,64)f32 #attn.335=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3382 2 1 relative_position_bias0.95 25288 15838 $input=relative_position_bias0.95 $dim=25288 #relative_position_bias0.95=(6,64,64)f32 #15838=(1,6,64,64)f32 aten::add pnnx_16793 3 1 attn.335 15838 25289 input.375 #attn.335=(36,6,64,64)f32 #15838=(1,6,64,64)f32 #input.375=(36,6,64,64)f32 nn.Softmax layers_mmsa.1.residual_group.blocks.4.attn.softmax 1 1 input.375 15840 dim=-1 #input.375=(36,6,64,64)f32 #15840=(36,6,64,64)f32 nn.Dropout layers_mmsa.1.residual_group.blocks.4.attn.attn_drop 1 1 15840 15841 #15840=(36,6,64,64)f32 #15841=(36,6,64,64)f32 Tensor.select Tensor.select_898 3 1 qkv0.95 25279 25280 v.167 $input=qkv0.95 $dim=25279 $index=25280 #qkv0.95=(3,36,6,64,32)f32 #v.167=(36,6,64,32)f32 prim::Constant pnnx_16795 0 1 25290 value=1 prim::Constant pnnx_16796 0 1 25291 value=2 torch.matmul torch.matmul_2369 2 1 15841 v.167 15842 $input=15841 $other=v.167 #15841=(36,6,64,64)f32 #v.167=(36,6,64,32)f32 #15842=(36,6,64,32)f32 prim::ListConstruct pnnx_16798 3 1 15807 15811 15815 15844 torch.transpose torch.transpose_3156 3 1 15842 25290 25291 15843 $input=15842 $dim0=25290 $dim1=25291 #15842=(36,6,64,32)f32 #15843=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_599 2 1 15843 15844 input0.99 $input=15843 $shape=15844 #15843=(36,64,6,32)f32 #input0.99=(36,64,192)f32 nn.Linear layers_mmsa.1.residual_group.blocks.4.attn.proj 1 1 input0.99 15846 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.99=(36,64,192)f32 #15846=(36,64,192)f32 nn.Dropout layers_mmsa.1.residual_group.blocks.4.attn.proj_drop 1 1 15846 15847 #15846=(36,64,192)f32 #15847=(36,64,192)f32 prim::Constant pnnx_16800 0 1 25292 value=-1 prim::Constant pnnx_16801 0 1 25293 value=8 prim::Constant pnnx_16802 0 1 25294 value=8 prim::ListConstruct pnnx_16803 4 1 25292 25293 25294 15756 15848 prim::Constant pnnx_16805 0 1 25295 value=8 prim::Constant pnnx_16806 0 1 25296 value=trunc aten::div pnnx_16807 3 1 H1.1 25295 25296 15850 aten::Int pnnx_16808 1 1 15850 15851 prim::Constant pnnx_16809 0 1 25297 value=8 prim::Constant pnnx_16810 0 1 25298 value=trunc aten::div pnnx_16811 3 1 W1.1 25297 25298 15852 aten::Int pnnx_16812 1 1 15852 15853 prim::Constant pnnx_16813 0 1 25299 value=1 prim::Constant pnnx_16814 0 1 25300 value=8 prim::Constant pnnx_16815 0 1 25301 value=8 prim::Constant pnnx_16816 0 1 25302 value=-1 prim::ListConstruct pnnx_16817 6 1 25299 15851 15853 25300 25301 25302 15854 prim::Constant pnnx_16819 0 1 25303 value=0 prim::Constant pnnx_16820 0 1 25304 value=1 prim::Constant pnnx_16821 0 1 25305 value=3 prim::Constant pnnx_16822 0 1 25306 value=2 prim::Constant pnnx_16823 0 1 25307 value=4 prim::Constant pnnx_16824 0 1 25308 value=5 prim::ListConstruct pnnx_16825 6 1 25303 25304 25305 25306 25307 25308 15856 Tensor.view Tensor.view_1897 2 1 15847 15848 windows.167 $input=15847 $shape=15848 #15847=(36,64,192)f32 #windows.167=(36,8,8,192)f32 Tensor.view Tensor.view_1898 2 1 windows.167 15854 x2.95 $input=windows.167 $shape=15854 #windows.167=(36,8,8,192)f32 #x2.95=(1,6,6,8,8,192)f32 prim::Constant pnnx_16829 0 1 25310 value=1 prim::Constant pnnx_16830 0 1 25311 value=-1 prim::ListConstruct pnnx_16831 4 1 25310 1705 1945 25311 15859 torch.permute torch.permute_2859 2 1 x2.95 15856 15857 $input=x2.95 $dims=15856 #x2.95=(1,6,6,8,8,192)f32 #15857=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_250 1 1 15857 15858 memory_format=torch.contiguous_format 
$input=15857 #15857=(1,6,8,6,8,192)f32 #15858=(1,6,8,6,8,192)f32 aten::mul pnnx_16833 2 1 H1.1 W1.1 15861 aten::Int pnnx_16834 1 1 15861 15862 prim::ListConstruct pnnx_16835 3 1 15751 15862 15755 15863 prim::Constant pnnx_16837 0 1 15865 value=None prim::Constant pnnx_16838 0 1 25312 value=1 Tensor.view Tensor.view_1899 2 1 15858 15859 x3.95 $input=15858 $shape=15859 #15858=(1,6,8,6,8,192)f32 #x3.95=(1,48,48,192)f32 Tensor.view Tensor.view_1900 2 1 x3.95 15863 x4.95 $input=x3.95 $shape=15863 #x3.95=(1,48,48,192)f32 #x4.95=(1,2304,192)f32 aten::add pnnx_16839 3 1 15732 x4.95 25312 input.377 #15732=(1,2304,192)f32 #x4.95=(1,2304,192)f32 #input.377=(1,2304,192)f32 nn.LayerNorm layers_mmsa.1.residual_group.blocks.4.norm2 1 1 input.377 15867 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.377=(1,2304,192)f32 #15867=(1,2304,192)f32 nn.Linear layers_mmsa.1.residual_group.blocks.4.mlp.fc1 1 1 15867 15872 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #15867=(1,2304,192)f32 #15872=(1,2304,384)f32 nn.GELU layers_mmsa.1.residual_group.blocks.4.mlp.act 1 1 15872 15873 #15872=(1,2304,384)f32 #15873=(1,2304,384)f32 nn.Dropout layers_mmsa.1.residual_group.blocks.4.mlp.drop 1 1 15873 15874 #15873=(1,2304,384)f32 #15874=(1,2304,384)f32 nn.Linear layers_mmsa.1.residual_group.blocks.4.mlp.fc2 1 1 15874 15875 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #15874=(1,2304,384)f32 #15875=(1,2304,192)f32 nn.Dropout layers_mmsa.1.residual_group.blocks.4.mlp.drop 1 1 15875 15876 #15875=(1,2304,192)f32 #15876=(1,2304,192)f32 prim::Constant pnnx_16840 0 1 15877 value=None prim::Constant pnnx_16841 0 1 25313 value=1 aten::add pnnx_16842 3 1 input.377 15876 25313 15878 #input.377=(1,2304,192)f32 #15876=(1,2304,192)f32 #15878=(1,2304,192)f32 prim::Constant pnnx_16843 0 1 15879 value=trunc prim::Constant pnnx_16844 0 1 15880 value=8 prim::Constant pnnx_16845 0 1 15881 value=0 prim::Constant pnnx_16846 0 1 15882 value=2 prim::Constant pnnx_16847 0 1 15883 value=-4 prim::Constant pnnx_16848 0 1 15884 value=1 prim::Constant pnnx_16849 0 1 15885 value=3 prim::Constant pnnx_16850 0 1 15886 value=8 prim::Constant pnnx_16851 0 1 15887 value=4 prim::Constant pnnx_16852 0 1 15888 value=5 prim::Constant pnnx_16853 0 1 15889 value=-1 prim::Constant pnnx_16854 0 1 15890 value=64 pnnx.Attribute layers_mmsa.1.residual_group.blocks.5 0 1 attn_mask.85 @attn_mask=(36,64,64)f32 #attn_mask.85=(36,64,64)f32 aten::size pnnx_16855 2 1 15878 15881 15897 #15878=(1,2304,192)f32 prim::NumToTensor pnnx_16856 1 1 15897 B.201 aten::Int pnnx_16857 1 1 B.201 15899 aten::Int pnnx_16858 1 1 B.201 15900 aten::size pnnx_16859 2 1 15878 15882 15901 #15878=(1,2304,192)f32 prim::NumToTensor pnnx_16860 1 1 15901 C.343 aten::Int pnnx_16861 1 1 C.343 15903 aten::Int pnnx_16862 1 1 C.343 15904 aten::Int pnnx_16863 1 1 C.343 15905 aten::Int pnnx_16864 1 1 C.343 15906 nn.LayerNorm layers_mmsa.1.residual_group.blocks.5.norm1 1 1 15878 15907 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #15878=(1,2304,192)f32 #15907=(1,2304,192)f32 prim::ListConstruct pnnx_16865 4 1 15900 1702 1942 15906 15908 prim::Constant pnnx_16867 0 1 25314 value=-4 prim::ListConstruct pnnx_16868 2 1 15883 25314 15910 prim::Constant pnnx_16869 0 1 25315 value=2 prim::ListConstruct pnnx_16870 2 1 15884 25315 15911 Tensor.view Tensor.view_1901 2 1 15907 15908 x.169 $input=15907 $shape=15908 #15907=(1,2304,192)f32 
#x.169=(1,48,48,192)f32 prim::Constant pnnx_16872 0 1 25316 value=0 torch.roll torch.roll_2502 3 1 x.169 15910 15911 x0.97 $input=x.169 $shifts=15910 $dims=15911 #x.169=(1,48,48,192)f32 #x0.97=(1,48,48,192)f32 aten::size pnnx_16873 2 1 x0.97 25316 15913 #x0.97=(1,48,48,192)f32 prim::NumToTensor pnnx_16874 1 1 15913 B0.97 aten::Int pnnx_16875 1 1 B0.97 15915 prim::Constant pnnx_16876 0 1 25317 value=1 aten::size pnnx_16877 2 1 x0.97 25317 15916 #x0.97=(1,48,48,192)f32 prim::NumToTensor pnnx_16878 1 1 15916 15917 prim::Constant pnnx_16879 0 1 25318 value=2 aten::size pnnx_16880 2 1 x0.97 25318 15918 #x0.97=(1,48,48,192)f32 prim::NumToTensor pnnx_16881 1 1 15918 15919 aten::size pnnx_16882 2 1 x0.97 15885 15920 #x0.97=(1,48,48,192)f32 prim::NumToTensor pnnx_16883 1 1 15920 C0.97 aten::Int pnnx_16884 1 1 C0.97 15922 aten::Int pnnx_16885 1 1 C0.97 15923 aten::div pnnx_16886 3 1 15917 15880 15879 15924 aten::Int pnnx_16887 1 1 15924 15925 prim::Constant pnnx_16888 0 1 25319 value=8 prim::Constant pnnx_16889 0 1 25320 value=trunc aten::div pnnx_16890 3 1 15919 25319 25320 15926 aten::Int pnnx_16891 1 1 15926 15927 prim::Constant pnnx_16892 0 1 25321 value=8 prim::ListConstruct pnnx_16893 6 1 15915 15925 15886 15927 25321 15923 15928 prim::Constant pnnx_16895 0 1 25322 value=0 prim::Constant pnnx_16896 0 1 25323 value=1 prim::Constant pnnx_16897 0 1 25324 value=3 prim::Constant pnnx_16898 0 1 25325 value=2 prim::ListConstruct pnnx_16899 6 1 25322 25323 25324 25325 15887 15888 15930 Tensor.view Tensor.view_1902 2 1 x0.97 15928 x1.97 $input=x0.97 $shape=15928 #x0.97=(1,48,48,192)f32 #x1.97=(1,6,8,6,8,192)f32 prim::Constant pnnx_16903 0 1 25327 value=8 prim::Constant pnnx_16904 0 1 25328 value=8 prim::ListConstruct pnnx_16905 4 1 15889 25327 25328 15922 15933 torch.permute torch.permute_2860 2 1 x1.97 15930 15931 $input=x1.97 $dims=15930 #x1.97=(1,6,8,6,8,192)f32 #15931=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_251 1 1 15931 15932 memory_format=torch.contiguous_format $input=15931 #15931=(1,6,6,8,8,192)f32 #15932=(1,6,6,8,8,192)f32 prim::Constant pnnx_16907 0 1 25329 value=-1 prim::ListConstruct pnnx_16908 3 1 25329 15890 15905 15935 prim::Constant pnnx_16910 0 1 15937 value=1.767767e-01 prim::Constant pnnx_16911 0 1 15938 value=trunc prim::Constant pnnx_16912 0 1 15939 value=6 prim::Constant pnnx_16913 0 1 15940 value=0 prim::Constant pnnx_16914 0 1 15941 value=1 prim::Constant pnnx_16915 0 1 15942 value=2 prim::Constant pnnx_16916 0 1 15943 value=3 prim::Constant pnnx_16917 0 1 15944 value=6 prim::Constant pnnx_16918 0 1 15945 value=4 prim::Constant pnnx_16919 0 1 15946 value=-2 prim::Constant pnnx_16920 0 1 15947 value=-1 prim::Constant pnnx_16921 0 1 15948 value=64 pnnx.Attribute layers_mmsa.1.residual_group.blocks.5.attn 0 1 relative_position_bias_table.169 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.169=(225,6)f32 pnnx.Attribute layers_mmsa.1.residual_group.blocks.5.attn 0 1 relative_position_index.169 @relative_position_index=(64,64)i64 #relative_position_index.169=(64,64)i64 Tensor.view Tensor.view_1903 2 1 15932 15933 x_windows.169 $input=15932 $shape=15933 #15932=(1,6,6,8,8,192)f32 #x_windows.169=(36,8,8,192)f32 Tensor.view Tensor.view_1904 2 1 x_windows.169 15935 x2.97 $input=x_windows.169 $shape=15935 #x_windows.169=(36,8,8,192)f32 #x2.97=(36,64,192)f32 aten::size pnnx_16922 2 1 x2.97 15940 15956 #x2.97=(36,64,192)f32 prim::NumToTensor pnnx_16923 1 1 15956 B_.169 aten::Int pnnx_16924 1 1 B_.169 15958 aten::Int pnnx_16925 1 1 B_.169 15959 aten::size 
pnnx_16926 2 1 x2.97 15941 15960 #x2.97=(36,64,192)f32 prim::NumToTensor pnnx_16927 1 1 15960 N.169 aten::Int pnnx_16928 1 1 N.169 15962 aten::Int pnnx_16929 1 1 N.169 15963 aten::Int pnnx_16930 1 1 N.169 15964 aten::Int pnnx_16931 1 1 N.169 15965 aten::Int pnnx_16932 1 1 N.169 15966 aten::Int pnnx_16933 1 1 N.169 15967 aten::size pnnx_16934 2 1 x2.97 15942 15968 #x2.97=(36,64,192)f32 prim::NumToTensor pnnx_16935 1 1 15968 C.345 aten::Int pnnx_16936 1 1 C.345 15970 nn.Linear layers_mmsa.1.residual_group.blocks.5.attn.qkv 1 1 x2.97 15971 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.97=(36,64,192)f32 #15971=(36,64,576)f32 aten::div pnnx_16937 3 1 C.345 15939 15938 15972 aten::Int pnnx_16938 1 1 15972 15973 prim::ListConstruct pnnx_16939 5 1 15959 15967 15943 15944 15973 15974 prim::Constant pnnx_16941 0 1 25330 value=2 prim::Constant pnnx_16942 0 1 25331 value=0 prim::Constant pnnx_16943 0 1 25332 value=3 prim::Constant pnnx_16944 0 1 25333 value=1 prim::ListConstruct pnnx_16945 5 1 25330 25331 25332 25333 15945 15976 Tensor.reshape Tensor.reshape_600 2 1 15971 15974 15975 $input=15971 $shape=15974 #15971=(36,64,576)f32 #15975=(36,64,3,6,32)f32 prim::Constant pnnx_16947 0 1 25334 value=0 prim::Constant pnnx_16948 0 1 25335 value=0 prim::Constant pnnx_16950 0 1 25336 value=0 prim::Constant pnnx_16951 0 1 25337 value=1 prim::Constant pnnx_16953 0 1 25338 value=0 prim::Constant pnnx_16954 0 1 25339 value=2 torch.permute torch.permute_2861 2 1 15975 15976 qkv0.97 $input=15975 $dims=15976 #15975=(36,64,3,6,32)f32 #qkv0.97=(3,36,6,64,32)f32 Tensor.select Tensor.select_899 3 1 qkv0.97 25334 25335 q.169 $input=qkv0.97 $dim=25334 $index=25335 #qkv0.97=(3,36,6,64,32)f32 #q.169=(36,6,64,32)f32 aten::mul pnnx_16956 2 1 q.169 15937 q0.97 #q.169=(36,6,64,32)f32 #q0.97=(36,6,64,32)f32 Tensor.select Tensor.select_900 3 1 qkv0.97 25336 25337 k.169 $input=qkv0.97 $dim=25336 $index=25337 #qkv0.97=(3,36,6,64,32)f32 #k.169=(36,6,64,32)f32 prim::Constant pnnx_16959 0 1 25340 value=-1 prim::ListConstruct pnnx_16960 1 1 25340 15984 Tensor.view Tensor.view_1905 2 1 relative_position_index.169 15984 15985 $input=relative_position_index.169 $shape=15984 #relative_position_index.169=(64,64)i64 #15985=(4096)i64 prim::ListConstruct pnnx_16962 1 1 15985 15986 #15985=(4096)i64 prim::Constant pnnx_16964 0 1 25341 value=64 prim::Constant pnnx_16965 0 1 25342 value=-1 prim::ListConstruct pnnx_16966 3 1 15948 25341 25342 15988 Tensor.index Tensor.index_409 2 1 relative_position_bias_table.169 15986 15987 $input=relative_position_bias_table.169 $expr=15986 #relative_position_bias_table.169=(225,6)f32 #15987=(4096,6)f32 prim::Constant pnnx_16968 0 1 25343 value=2 prim::Constant pnnx_16969 0 1 25344 value=0 prim::Constant pnnx_16970 0 1 25345 value=1 prim::ListConstruct pnnx_16971 3 1 25343 25344 25345 15990 Tensor.view Tensor.view_1906 2 1 15987 15988 relative_position_bias.169 $input=15987 $shape=15988 #15987=(4096,6)f32 #relative_position_bias.169=(64,64,6)f32 prim::Constant pnnx_16975 0 1 25347 value=0 torch.permute torch.permute_2862 2 1 relative_position_bias.169 15990 15991 $input=relative_position_bias.169 $dims=15990 #relative_position_bias.169=(64,64,6)f32 #15991=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_252 1 1 15991 relative_position_bias0.97 memory_format=torch.contiguous_format $input=15991 #15991=(6,64,64)f32 #relative_position_bias0.97=(6,64,64)f32 prim::Constant pnnx_16977 0 1 25348 value=1 torch.transpose torch.transpose_3157 3 1 k.169 15946 15947 15982 $input=k.169 
$dim0=15946 $dim1=15947 #k.169=(36,6,64,32)f32 #15982=(36,6,32,64)f32 torch.matmul torch.matmul_2370 2 1 q0.97 15982 attn.339 $input=q0.97 $other=15982 #q0.97=(36,6,64,32)f32 #15982=(36,6,32,64)f32 #attn.339=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3383 2 1 relative_position_bias0.97 25347 15993 $input=relative_position_bias0.97 $dim=25347 #relative_position_bias0.97=(6,64,64)f32 #15993=(1,6,64,64)f32 aten::add pnnx_16978 3 1 attn.339 15993 25348 attn0.49 #attn.339=(36,6,64,64)f32 #15993=(1,6,64,64)f32 #attn0.49=(36,6,64,64)f32 prim::Constant pnnx_16979 0 1 25349 value=0 aten::size pnnx_16980 2 1 attn_mask.85 25349 15995 #attn_mask.85=(36,64,64)f32 prim::NumToTensor pnnx_16981 1 1 15995 other.85 aten::Int pnnx_16982 1 1 other.85 15997 prim::Constant pnnx_16983 0 1 25350 value=trunc aten::div pnnx_16984 3 1 B_.169 other.85 25350 15998 aten::Int pnnx_16985 1 1 15998 15999 prim::Constant pnnx_16986 0 1 25351 value=6 prim::ListConstruct pnnx_16987 5 1 15999 15997 25351 15966 15965 16000 prim::Constant pnnx_16989 0 1 25352 value=1 prim::Constant pnnx_16991 0 1 25353 value=0 prim::Constant pnnx_16993 0 1 25354 value=1 Tensor.view Tensor.view_1907 2 1 attn0.49 16000 16001 $input=attn0.49 $shape=16000 #attn0.49=(36,6,64,64)f32 #16001=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3384 2 1 attn_mask.85 25352 16002 $input=attn_mask.85 $dim=25352 #attn_mask.85=(36,64,64)f32 #16002=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3385 2 1 16002 25353 16003 $input=16002 $dim=25353 #16002=(36,1,64,64)f32 #16003=(1,36,1,64,64)f32 aten::add pnnx_16994 3 1 16001 16003 25354 attn1.49 #16001=(1,36,6,64,64)f32 #16003=(1,36,1,64,64)f32 #attn1.49=(1,36,6,64,64)f32 prim::Constant pnnx_16995 0 1 25355 value=-1 prim::Constant pnnx_16996 0 1 25356 value=6 prim::ListConstruct pnnx_16997 4 1 25355 25356 15964 15963 16005 Tensor.view Tensor.view_1908 2 1 attn1.49 16005 input.379 $input=attn1.49 $shape=16005 #attn1.49=(1,36,6,64,64)f32 #input.379=(36,6,64,64)f32 nn.Softmax layers_mmsa.1.residual_group.blocks.5.attn.softmax 1 1 input.379 16007 dim=-1 #input.379=(36,6,64,64)f32 #16007=(36,6,64,64)f32 nn.Dropout layers_mmsa.1.residual_group.blocks.5.attn.attn_drop 1 1 16007 16008 #16007=(36,6,64,64)f32 #16008=(36,6,64,64)f32 Tensor.select Tensor.select_901 3 1 qkv0.97 25338 25339 v.169 $input=qkv0.97 $dim=25338 $index=25339 #qkv0.97=(3,36,6,64,32)f32 #v.169=(36,6,64,32)f32 prim::Constant pnnx_17000 0 1 25357 value=1 prim::Constant pnnx_17001 0 1 25358 value=2 torch.matmul torch.matmul_2371 2 1 16008 v.169 16009 $input=16008 $other=v.169 #16008=(36,6,64,64)f32 #v.169=(36,6,64,32)f32 #16009=(36,6,64,32)f32 prim::ListConstruct pnnx_17003 3 1 15958 15962 15970 16011 torch.transpose torch.transpose_3158 3 1 16009 25357 25358 16010 $input=16009 $dim0=25357 $dim1=25358 #16009=(36,6,64,32)f32 #16010=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_601 2 1 16010 16011 input0.101 $input=16010 $shape=16011 #16010=(36,64,6,32)f32 #input0.101=(36,64,192)f32 nn.Linear layers_mmsa.1.residual_group.blocks.5.attn.proj 1 1 input0.101 16013 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.101=(36,64,192)f32 #16013=(36,64,192)f32 nn.Dropout layers_mmsa.1.residual_group.blocks.5.attn.proj_drop 1 1 16013 16014 #16013=(36,64,192)f32 #16014=(36,64,192)f32 prim::Constant pnnx_17005 0 1 25359 value=-1 prim::Constant pnnx_17006 0 1 25360 value=8 prim::Constant pnnx_17007 0 1 25361 value=8 prim::ListConstruct pnnx_17008 4 1 25359 25360 25361 15904 16015 prim::Constant pnnx_17010 0 1 25362 value=8 
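The operators on either side of this point reverse the 8x8 window partition (view -> permute(0,1,3,2,4,5) -> contiguous -> view back to (1,48,48,192)), undo the cyclic shift with torch.roll(shifts=(4,4), dims=(1,2)) in the shifted blocks, add the residual, and finish with norm2 -> fc1 (192->384) -> GELU -> drop -> fc2 (384->192) -> drop -> residual. A minimal sketch of that window-reverse / roll / MLP tail, under the same assumptions as the attention sketch above (helper names and defaults are illustrative, not from the exporter's source):

import torch
import torch.nn as nn

def window_partition(x, ws=8):
    # (B, H, W, C) -> (num_windows * B, ws, ws, C); matches the view/permute pair
    # that precedes each attention call in the trace (the pre-attention roll uses
    # shifts=(-4, -4) on the shifted blocks).
    B, H, W, C = x.shape
    x = x.view(B, H // ws, ws, W // ws, ws, C)
    return x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, ws, ws, C)

def window_reverse(windows, ws, H, W):
    # Inverse of window_partition: (num_windows * B, ws, ws, C) -> (B, H, W, C).
    B = windows.shape[0] // (H * W // ws // ws)
    x = windows.view(B, H // ws, W // ws, ws, ws, -1)
    return x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, H, W, -1)

class SwinBlockTailSketch(nn.Module):
    # norm2 + MLP (fc1 -> GELU -> fc2) with residual, as in the dump (192 -> 384 -> 192).
    def __init__(self, dim=192, hidden=384, drop=0.0):
        super().__init__()
        self.norm2 = nn.LayerNorm(dim)
        self.mlp = nn.Sequential(
            nn.Linear(dim, hidden), nn.GELU(), nn.Dropout(drop),
            nn.Linear(hidden, dim), nn.Dropout(drop))

    def forward(self, x, attn_out, H=48, W=48, ws=8, shift=4):
        # x: (B, H*W, C) block input (the residual source); attn_out: (nW*B, ws*ws, C).
        B, L, C = x.shape
        windows = attn_out.view(-1, ws, ws, C)
        shifted = window_reverse(windows, ws, H, W)            # (B, H, W, C)
        if shift > 0:                                          # undo the cyclic shift
            shifted = torch.roll(shifted, shifts=(shift, shift), dims=(1, 2))
        x = x + shifted.view(B, H * W, C)                      # first residual add
        return x + self.mlp(self.norm2(x))                     # second residual add

The shift of 4 is window_size // 2, consistent with the value=-4 / shifts=(4,4) constants that bracket every odd-indexed block in this residual group.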
prim::Constant pnnx_17011 0 1 25363 value=trunc aten::div pnnx_17012 3 1 H1.1 25362 25363 16017 aten::Int pnnx_17013 1 1 16017 16018 prim::Constant pnnx_17014 0 1 25364 value=8 prim::Constant pnnx_17015 0 1 25365 value=trunc aten::div pnnx_17016 3 1 W1.1 25364 25365 16019 aten::Int pnnx_17017 1 1 16019 16020 prim::Constant pnnx_17018 0 1 25366 value=1 prim::Constant pnnx_17019 0 1 25367 value=8 prim::Constant pnnx_17020 0 1 25368 value=8 prim::Constant pnnx_17021 0 1 25369 value=-1 prim::ListConstruct pnnx_17022 6 1 25366 16018 16020 25367 25368 25369 16021 prim::Constant pnnx_17024 0 1 25370 value=0 prim::Constant pnnx_17025 0 1 25371 value=1 prim::Constant pnnx_17026 0 1 25372 value=3 prim::Constant pnnx_17027 0 1 25373 value=2 prim::Constant pnnx_17028 0 1 25374 value=4 prim::Constant pnnx_17029 0 1 25375 value=5 prim::ListConstruct pnnx_17030 6 1 25370 25371 25372 25373 25374 25375 16023 Tensor.view Tensor.view_1909 2 1 16014 16015 windows.169 $input=16014 $shape=16015 #16014=(36,64,192)f32 #windows.169=(36,8,8,192)f32 Tensor.view Tensor.view_1910 2 1 windows.169 16021 x3.97 $input=windows.169 $shape=16021 #windows.169=(36,8,8,192)f32 #x3.97=(1,6,6,8,8,192)f32 prim::Constant pnnx_17034 0 1 25377 value=1 prim::Constant pnnx_17035 0 1 25378 value=-1 prim::ListConstruct pnnx_17036 4 1 25377 1699 1939 25378 16026 torch.permute torch.permute_2863 2 1 x3.97 16023 16024 $input=x3.97 $dims=16023 #x3.97=(1,6,6,8,8,192)f32 #16024=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_253 1 1 16024 16025 memory_format=torch.contiguous_format $input=16024 #16024=(1,6,8,6,8,192)f32 #16025=(1,6,8,6,8,192)f32 prim::Constant pnnx_17038 0 1 25379 value=4 prim::Constant pnnx_17039 0 1 25380 value=4 prim::ListConstruct pnnx_17040 2 1 25379 25380 16028 prim::Constant pnnx_17041 0 1 25381 value=1 prim::Constant pnnx_17042 0 1 25382 value=2 prim::ListConstruct pnnx_17043 2 1 25381 25382 16029 Tensor.view Tensor.view_1911 2 1 16025 16026 shifted_x.85 $input=16025 $shape=16026 #16025=(1,6,8,6,8,192)f32 #shifted_x.85=(1,48,48,192)f32 aten::mul pnnx_17045 2 1 H1.1 W1.1 16031 aten::Int pnnx_17046 1 1 16031 16032 prim::ListConstruct pnnx_17047 3 1 15899 16032 15903 16033 prim::Constant pnnx_17049 0 1 16035 value=None prim::Constant pnnx_17050 0 1 25383 value=1 torch.roll torch.roll_2503 3 1 shifted_x.85 16028 16029 x4.97 $input=shifted_x.85 $shifts=16028 $dims=16029 #shifted_x.85=(1,48,48,192)f32 #x4.97=(1,48,48,192)f32 Tensor.view Tensor.view_1912 2 1 x4.97 16033 x5.85 $input=x4.97 $shape=16033 #x4.97=(1,48,48,192)f32 #x5.85=(1,2304,192)f32 aten::add pnnx_17051 3 1 15878 x5.85 25383 input.381 #15878=(1,2304,192)f32 #x5.85=(1,2304,192)f32 #input.381=(1,2304,192)f32 nn.LayerNorm layers_mmsa.1.residual_group.blocks.5.norm2 1 1 input.381 16037 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.381=(1,2304,192)f32 #16037=(1,2304,192)f32 nn.Linear layers_mmsa.1.residual_group.blocks.5.mlp.fc1 1 1 16037 16042 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #16037=(1,2304,192)f32 #16042=(1,2304,384)f32 nn.GELU layers_mmsa.1.residual_group.blocks.5.mlp.act 1 1 16042 16043 #16042=(1,2304,384)f32 #16043=(1,2304,384)f32 nn.Dropout layers_mmsa.1.residual_group.blocks.5.mlp.drop 1 1 16043 16044 #16043=(1,2304,384)f32 #16044=(1,2304,384)f32 nn.Linear layers_mmsa.1.residual_group.blocks.5.mlp.fc2 1 1 16044 16045 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #16044=(1,2304,384)f32 #16045=(1,2304,192)f32 nn.Dropout 
layers_mmsa.1.residual_group.blocks.5.mlp.drop 1 1 16045 16046 #16045=(1,2304,192)f32 #16046=(1,2304,192)f32 prim::Constant pnnx_17052 0 1 16047 value=None prim::Constant pnnx_17053 0 1 25384 value=1 aten::add pnnx_17054 3 1 input.381 16046 25384 16048 #input.381=(1,2304,192)f32 #16046=(1,2304,192)f32 #16048=(1,2304,192)f32 prim::Constant pnnx_17055 0 1 16049 value=0 prim::Constant pnnx_17056 0 1 16050 value=1 prim::Constant pnnx_17057 0 1 16051 value=2 prim::Constant pnnx_17058 0 1 16052 value=192 aten::size pnnx_17059 2 1 16048 16049 16053 #16048=(1,2304,192)f32 prim::NumToTensor pnnx_17060 1 1 16053 B.203 aten::Int pnnx_17061 1 1 B.203 16055 prim::ListConstruct pnnx_17063 4 1 16055 16052 1696 1936 16057 torch.transpose torch.transpose_3159 3 1 16048 16050 16051 16056 $input=16048 $dim0=16050 $dim1=16051 #16048=(1,2304,192)f32 #16056=(1,192,2304)f32 Tensor.view Tensor.view_1913 2 1 16056 16057 input.383 $input=16056 $shape=16057 #16056=(1,192,2304)f32 #input.383=(1,192,48,48)f32 nn.Conv2d layers_mmsa.1.conv 1 1 input.383 16059 bias=True dilation=(1,1) groups=1 in_channels=192 kernel_size=(3,3) out_channels=192 padding=(1,1) padding_mode=zeros stride=(1,1) @bias=(192)f32 @weight=(192,192,3,3)f32 #input.383=(1,192,48,48)f32 #16059=(1,192,48,48)f32 prim::Constant pnnx_17065 0 1 16060 value=-1 prim::Constant pnnx_17066 0 1 16061 value=2 prim::Constant pnnx_17067 0 1 16062 value=1 prim::Constant pnnx_17069 0 1 25385 value=2 torch.flatten torch.flatten_2199 3 1 16059 16061 16060 16063 $input=16059 $start_dim=16061 $end_dim=16060 #16059=(1,192,48,48)f32 #16063=(1,192,2304)f32 torch.transpose torch.transpose_3160 3 1 16063 16062 25385 16064 $input=16063 $dim0=16062 $dim1=25385 #16063=(1,192,2304)f32 #16064=(1,2304,192)f32 aten::add pnnx_17071 3 1 16064 15083 15084 16065 #16064=(1,2304,192)f32 #15083=(1,2304,192)f32 #16065=(1,2304,192)f32 prim::Constant pnnx_17072 0 1 16066 value=1 prim::Constant pnnx_17073 0 1 16083 value=trunc prim::Constant pnnx_17074 0 1 16084 value=8 prim::Constant pnnx_17075 0 1 16085 value=0 prim::Constant pnnx_17076 0 1 16086 value=2 prim::Constant pnnx_17077 0 1 16087 value=1 prim::Constant pnnx_17078 0 1 16088 value=3 prim::Constant pnnx_17079 0 1 16089 value=8 prim::Constant pnnx_17080 0 1 16090 value=4 prim::Constant pnnx_17081 0 1 16091 value=5 prim::Constant pnnx_17082 0 1 16092 value=-1 prim::Constant pnnx_17083 0 1 16093 value=64 aten::size pnnx_17084 2 1 16065 16085 16099 #16065=(1,2304,192)f32 prim::NumToTensor pnnx_17085 1 1 16099 B.205 aten::Int pnnx_17086 1 1 B.205 16101 aten::Int pnnx_17087 1 1 B.205 16102 aten::size pnnx_17088 2 1 16065 16086 16103 #16065=(1,2304,192)f32 prim::NumToTensor pnnx_17089 1 1 16103 C.347 aten::Int pnnx_17090 1 1 C.347 16105 aten::Int pnnx_17091 1 1 C.347 16106 aten::Int pnnx_17092 1 1 C.347 16107 aten::Int pnnx_17093 1 1 C.347 16108 nn.LayerNorm layers_mmsa.2.residual_group.blocks.0.norm1 1 1 16065 16109 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #16065=(1,2304,192)f32 #16109=(1,2304,192)f32 prim::ListConstruct pnnx_17094 4 1 16102 1693 1933 16108 16110 prim::Constant pnnx_17096 0 1 25386 value=0 Tensor.view Tensor.view_1914 2 1 16109 16110 x.171 $input=16109 $shape=16110 #16109=(1,2304,192)f32 #x.171=(1,48,48,192)f32 aten::size pnnx_17097 2 1 x.171 25386 16112 #x.171=(1,48,48,192)f32 prim::NumToTensor pnnx_17098 1 1 16112 B0.99 aten::Int pnnx_17099 1 1 B0.99 16114 aten::size pnnx_17100 2 1 x.171 16087 16115 #x.171=(1,48,48,192)f32 prim::NumToTensor pnnx_17101 1 1 16115 16116 
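Between residual groups the dump folds the token sequence back into a 48x48 feature map, applies layers_mmsa.1.conv (a 3x3, 192-channel convolution) and adds the earlier skip tensor 15083 as a residual: torch.transpose_3159 -> Tensor.view_1913 -> nn.Conv2d -> torch.flatten_2199 -> torch.transpose_3160 -> aten::add. A small sketch of that token-to-map round trip, with illustrative helper names:

import torch
import torch.nn as nn

def group_tail(tokens, skip, conv, h=48, w=48):
    # tokens, skip: (B, H*W, C); conv: nn.Conv2d(C, C, 3, padding=1).
    # Mirrors transpose -> view -> conv3x3 -> flatten -> transpose -> add
    # around layers_mmsa.1.conv in the dump.
    b, n, c = tokens.shape
    x = tokens.transpose(1, 2).view(b, c, h, w)   # (1, 192, 48, 48)
    x = conv(x)
    x = x.flatten(2).transpose(1, 2)              # back to (1, 2304, 192)
    return x + skip

conv = nn.Conv2d(192, 192, kernel_size=3, padding=1)
out = group_tail(torch.randn(1, 2304, 192), torch.randn(1, 2304, 192), conv)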
prim::Constant pnnx_17102 0 1 25387 value=2 aten::size pnnx_17103 2 1 x.171 25387 16117 #x.171=(1,48,48,192)f32 prim::NumToTensor pnnx_17104 1 1 16117 16118 aten::size pnnx_17105 2 1 x.171 16088 16119 #x.171=(1,48,48,192)f32 prim::NumToTensor pnnx_17106 1 1 16119 C0.99 aten::Int pnnx_17107 1 1 C0.99 16121 aten::Int pnnx_17108 1 1 C0.99 16122 aten::div pnnx_17109 3 1 16116 16084 16083 16123 aten::Int pnnx_17110 1 1 16123 16124 prim::Constant pnnx_17111 0 1 25388 value=8 prim::Constant pnnx_17112 0 1 25389 value=trunc aten::div pnnx_17113 3 1 16118 25388 25389 16125 aten::Int pnnx_17114 1 1 16125 16126 prim::Constant pnnx_17115 0 1 25390 value=8 prim::ListConstruct pnnx_17116 6 1 16114 16124 16089 16126 25390 16122 16127 prim::Constant pnnx_17118 0 1 25391 value=0 prim::Constant pnnx_17119 0 1 25392 value=1 prim::Constant pnnx_17120 0 1 25393 value=3 prim::Constant pnnx_17121 0 1 25394 value=2 prim::ListConstruct pnnx_17122 6 1 25391 25392 25393 25394 16090 16091 16129 Tensor.view Tensor.view_1915 2 1 x.171 16127 x0.99 $input=x.171 $shape=16127 #x.171=(1,48,48,192)f32 #x0.99=(1,6,8,6,8,192)f32 prim::Constant pnnx_17126 0 1 25396 value=8 prim::Constant pnnx_17127 0 1 25397 value=8 prim::ListConstruct pnnx_17128 4 1 16092 25396 25397 16121 16132 torch.permute torch.permute_2864 2 1 x0.99 16129 16130 $input=x0.99 $dims=16129 #x0.99=(1,6,8,6,8,192)f32 #16130=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_254 1 1 16130 16131 memory_format=torch.contiguous_format $input=16130 #16130=(1,6,6,8,8,192)f32 #16131=(1,6,6,8,8,192)f32 prim::Constant pnnx_17130 0 1 25398 value=-1 prim::ListConstruct pnnx_17131 3 1 25398 16093 16107 16134 prim::Constant pnnx_17133 0 1 16136 value=1.767767e-01 prim::Constant pnnx_17134 0 1 16137 value=trunc prim::Constant pnnx_17135 0 1 16138 value=6 prim::Constant pnnx_17136 0 1 16139 value=0 prim::Constant pnnx_17137 0 1 16140 value=1 prim::Constant pnnx_17138 0 1 16141 value=2 prim::Constant pnnx_17139 0 1 16142 value=3 prim::Constant pnnx_17140 0 1 16143 value=6 prim::Constant pnnx_17141 0 1 16144 value=4 prim::Constant pnnx_17142 0 1 16145 value=-2 prim::Constant pnnx_17143 0 1 16146 value=-1 prim::Constant pnnx_17144 0 1 16147 value=64 pnnx.Attribute layers_mmsa.2.residual_group.blocks.0.attn 0 1 relative_position_bias_table.171 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.171=(225,6)f32 pnnx.Attribute layers_mmsa.2.residual_group.blocks.0.attn 0 1 relative_position_index.171 @relative_position_index=(64,64)i64 #relative_position_index.171=(64,64)i64 Tensor.view Tensor.view_1916 2 1 16131 16132 x_windows.171 $input=16131 $shape=16132 #16131=(1,6,6,8,8,192)f32 #x_windows.171=(36,8,8,192)f32 Tensor.view Tensor.view_1917 2 1 x_windows.171 16134 x1.99 $input=x_windows.171 $shape=16134 #x_windows.171=(36,8,8,192)f32 #x1.99=(36,64,192)f32 aten::size pnnx_17145 2 1 x1.99 16139 16155 #x1.99=(36,64,192)f32 prim::NumToTensor pnnx_17146 1 1 16155 B_.171 aten::Int pnnx_17147 1 1 B_.171 16157 aten::Int pnnx_17148 1 1 B_.171 16158 aten::size pnnx_17149 2 1 x1.99 16140 16159 #x1.99=(36,64,192)f32 prim::NumToTensor pnnx_17150 1 1 16159 N.171 aten::Int pnnx_17151 1 1 N.171 16161 aten::Int pnnx_17152 1 1 N.171 16162 aten::size pnnx_17153 2 1 x1.99 16141 16163 #x1.99=(36,64,192)f32 prim::NumToTensor pnnx_17154 1 1 16163 C.349 aten::Int pnnx_17155 1 1 C.349 16165 nn.Linear layers_mmsa.2.residual_group.blocks.0.attn.qkv 1 1 x1.99 16166 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.99=(36,64,192)f32 #16166=(36,64,576)f32 
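The view/permute chain above (Tensor.view_1914 -> Tensor.view_1915 -> torch.permute_2864 -> Tensor.view_1916/1917) is the usual window partition: the (1,48,48,192) map is cut into 36 non-overlapping 8x8 windows, giving (36,64,192) tokens per window, and the mirror-image chain after each attention block undoes it. A sketch of both directions under that standard partition/reverse pairing; the function names are illustrative:

import torch

def window_partition(x, ws=8):
    # (B, H, W, C) -> (B * H/ws * W/ws, ws*ws, C), matching the view/permute chain.
    b, h, w, c = x.shape
    x = x.view(b, h // ws, ws, w // ws, ws, c)
    x = x.permute(0, 1, 3, 2, 4, 5).contiguous()
    return x.view(-1, ws * ws, c)

def window_reverse(windows, ws, h, w):
    # Inverse: (B*nW, ws*ws, C) -> (B, H, W, C).
    b = windows.shape[0] // (h // ws * w // ws)
    x = windows.view(b, h // ws, w // ws, ws, ws, -1)
    x = x.permute(0, 1, 3, 2, 4, 5).contiguous()
    return x.view(b, h, w, -1)

x = torch.randn(1, 48, 48, 192)
wins = window_partition(x)                 # (36, 64, 192)
assert torch.equal(window_reverse(wins, 8, 48, 48), x)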
aten::div pnnx_17156 3 1 C.349 16138 16137 16167 aten::Int pnnx_17157 1 1 16167 16168 prim::ListConstruct pnnx_17158 5 1 16158 16162 16142 16143 16168 16169 prim::Constant pnnx_17160 0 1 25399 value=2 prim::Constant pnnx_17161 0 1 25400 value=0 prim::Constant pnnx_17162 0 1 25401 value=3 prim::Constant pnnx_17163 0 1 25402 value=1 prim::ListConstruct pnnx_17164 5 1 25399 25400 25401 25402 16144 16171 Tensor.reshape Tensor.reshape_602 2 1 16166 16169 16170 $input=16166 $shape=16169 #16166=(36,64,576)f32 #16170=(36,64,3,6,32)f32 prim::Constant pnnx_17166 0 1 25403 value=0 prim::Constant pnnx_17167 0 1 25404 value=0 prim::Constant pnnx_17169 0 1 25405 value=0 prim::Constant pnnx_17170 0 1 25406 value=1 prim::Constant pnnx_17172 0 1 25407 value=0 prim::Constant pnnx_17173 0 1 25408 value=2 torch.permute torch.permute_2865 2 1 16170 16171 qkv0.99 $input=16170 $dims=16171 #16170=(36,64,3,6,32)f32 #qkv0.99=(3,36,6,64,32)f32 Tensor.select Tensor.select_902 3 1 qkv0.99 25403 25404 q.171 $input=qkv0.99 $dim=25403 $index=25404 #qkv0.99=(3,36,6,64,32)f32 #q.171=(36,6,64,32)f32 aten::mul pnnx_17175 2 1 q.171 16136 q0.99 #q.171=(36,6,64,32)f32 #q0.99=(36,6,64,32)f32 Tensor.select Tensor.select_903 3 1 qkv0.99 25405 25406 k.171 $input=qkv0.99 $dim=25405 $index=25406 #qkv0.99=(3,36,6,64,32)f32 #k.171=(36,6,64,32)f32 prim::Constant pnnx_17178 0 1 25409 value=-1 prim::ListConstruct pnnx_17179 1 1 25409 16179 Tensor.view Tensor.view_1918 2 1 relative_position_index.171 16179 16180 $input=relative_position_index.171 $shape=16179 #relative_position_index.171=(64,64)i64 #16180=(4096)i64 prim::ListConstruct pnnx_17181 1 1 16180 16181 #16180=(4096)i64 prim::Constant pnnx_17183 0 1 25410 value=64 prim::Constant pnnx_17184 0 1 25411 value=-1 prim::ListConstruct pnnx_17185 3 1 16147 25410 25411 16183 Tensor.index Tensor.index_410 2 1 relative_position_bias_table.171 16181 16182 $input=relative_position_bias_table.171 $expr=16181 #relative_position_bias_table.171=(225,6)f32 #16182=(4096,6)f32 prim::Constant pnnx_17187 0 1 25412 value=2 prim::Constant pnnx_17188 0 1 25413 value=0 prim::Constant pnnx_17189 0 1 25414 value=1 prim::ListConstruct pnnx_17190 3 1 25412 25413 25414 16185 Tensor.view Tensor.view_1919 2 1 16182 16183 relative_position_bias.171 $input=16182 $shape=16183 #16182=(4096,6)f32 #relative_position_bias.171=(64,64,6)f32 prim::Constant pnnx_17194 0 1 25416 value=0 torch.permute torch.permute_2866 2 1 relative_position_bias.171 16185 16186 $input=relative_position_bias.171 $dims=16185 #relative_position_bias.171=(64,64,6)f32 #16186=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_255 1 1 16186 relative_position_bias0.99 memory_format=torch.contiguous_format $input=16186 #16186=(6,64,64)f32 #relative_position_bias0.99=(6,64,64)f32 prim::Constant pnnx_17196 0 1 25417 value=1 torch.transpose torch.transpose_3161 3 1 k.171 16145 16146 16177 $input=k.171 $dim0=16145 $dim1=16146 #k.171=(36,6,64,32)f32 #16177=(36,6,32,64)f32 torch.matmul torch.matmul_2372 2 1 q0.99 16177 attn.343 $input=q0.99 $other=16177 #q0.99=(36,6,64,32)f32 #16177=(36,6,32,64)f32 #attn.343=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3386 2 1 relative_position_bias0.99 25416 16188 $input=relative_position_bias0.99 $dim=25416 #relative_position_bias0.99=(6,64,64)f32 #16188=(1,6,64,64)f32 aten::add pnnx_17197 3 1 attn.343 16188 25417 input.385 #attn.343=(36,6,64,64)f32 #16188=(1,6,64,64)f32 #input.385=(36,6,64,64)f32 nn.Softmax layers_mmsa.2.residual_group.blocks.0.attn.softmax 1 1 input.385 16190 dim=-1 #input.385=(36,6,64,64)f32 
#16190=(36,6,64,64)f32 nn.Dropout layers_mmsa.2.residual_group.blocks.0.attn.attn_drop 1 1 16190 16191 #16190=(36,6,64,64)f32 #16191=(36,6,64,64)f32 Tensor.select Tensor.select_904 3 1 qkv0.99 25407 25408 v.171 $input=qkv0.99 $dim=25407 $index=25408 #qkv0.99=(3,36,6,64,32)f32 #v.171=(36,6,64,32)f32 prim::Constant pnnx_17199 0 1 25418 value=1 prim::Constant pnnx_17200 0 1 25419 value=2 torch.matmul torch.matmul_2373 2 1 16191 v.171 16192 $input=16191 $other=v.171 #16191=(36,6,64,64)f32 #v.171=(36,6,64,32)f32 #16192=(36,6,64,32)f32 prim::ListConstruct pnnx_17202 3 1 16157 16161 16165 16194 torch.transpose torch.transpose_3162 3 1 16192 25418 25419 16193 $input=16192 $dim0=25418 $dim1=25419 #16192=(36,6,64,32)f32 #16193=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_603 2 1 16193 16194 input0.103 $input=16193 $shape=16194 #16193=(36,64,6,32)f32 #input0.103=(36,64,192)f32 nn.Linear layers_mmsa.2.residual_group.blocks.0.attn.proj 1 1 input0.103 16196 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.103=(36,64,192)f32 #16196=(36,64,192)f32 nn.Dropout layers_mmsa.2.residual_group.blocks.0.attn.proj_drop 1 1 16196 16197 #16196=(36,64,192)f32 #16197=(36,64,192)f32 prim::Constant pnnx_17204 0 1 25420 value=-1 prim::Constant pnnx_17205 0 1 25421 value=8 prim::Constant pnnx_17206 0 1 25422 value=8 prim::ListConstruct pnnx_17207 4 1 25420 25421 25422 16106 16198 prim::Constant pnnx_17209 0 1 25423 value=8 prim::Constant pnnx_17210 0 1 25424 value=trunc aten::div pnnx_17211 3 1 H1.1 25423 25424 16200 aten::Int pnnx_17212 1 1 16200 16201 prim::Constant pnnx_17213 0 1 25425 value=8 prim::Constant pnnx_17214 0 1 25426 value=trunc aten::div pnnx_17215 3 1 W1.1 25425 25426 16202 aten::Int pnnx_17216 1 1 16202 16203 prim::Constant pnnx_17217 0 1 25427 value=1 prim::Constant pnnx_17218 0 1 25428 value=8 prim::Constant pnnx_17219 0 1 25429 value=8 prim::Constant pnnx_17220 0 1 25430 value=-1 prim::ListConstruct pnnx_17221 6 1 25427 16201 16203 25428 25429 25430 16204 prim::Constant pnnx_17223 0 1 25431 value=0 prim::Constant pnnx_17224 0 1 25432 value=1 prim::Constant pnnx_17225 0 1 25433 value=3 prim::Constant pnnx_17226 0 1 25434 value=2 prim::Constant pnnx_17227 0 1 25435 value=4 prim::Constant pnnx_17228 0 1 25436 value=5 prim::ListConstruct pnnx_17229 6 1 25431 25432 25433 25434 25435 25436 16206 Tensor.view Tensor.view_1920 2 1 16197 16198 windows.171 $input=16197 $shape=16198 #16197=(36,64,192)f32 #windows.171=(36,8,8,192)f32 Tensor.view Tensor.view_1921 2 1 windows.171 16204 x2.99 $input=windows.171 $shape=16204 #windows.171=(36,8,8,192)f32 #x2.99=(1,6,6,8,8,192)f32 prim::Constant pnnx_17233 0 1 25438 value=1 prim::Constant pnnx_17234 0 1 25439 value=-1 prim::ListConstruct pnnx_17235 4 1 25438 1690 1930 25439 16209 torch.permute torch.permute_2867 2 1 x2.99 16206 16207 $input=x2.99 $dims=16206 #x2.99=(1,6,6,8,8,192)f32 #16207=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_256 1 1 16207 16208 memory_format=torch.contiguous_format $input=16207 #16207=(1,6,8,6,8,192)f32 #16208=(1,6,8,6,8,192)f32 aten::mul pnnx_17237 2 1 H1.1 W1.1 16211 aten::Int pnnx_17238 1 1 16211 16212 prim::ListConstruct pnnx_17239 3 1 16101 16212 16105 16213 prim::Constant pnnx_17241 0 1 16215 value=None prim::Constant pnnx_17242 0 1 25440 value=1 Tensor.view Tensor.view_1922 2 1 16208 16209 x3.99 $input=16208 $shape=16209 #16208=(1,6,8,6,8,192)f32 #x3.99=(1,48,48,192)f32 Tensor.view Tensor.view_1923 2 1 x3.99 16213 x4.99 $input=x3.99 $shape=16213 #x3.99=(1,48,48,192)f32 
#x4.99=(1,2304,192)f32 aten::add pnnx_17243 3 1 16065 x4.99 25440 input.387 #16065=(1,2304,192)f32 #x4.99=(1,2304,192)f32 #input.387=(1,2304,192)f32 nn.LayerNorm layers_mmsa.2.residual_group.blocks.0.norm2 1 1 input.387 16217 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.387=(1,2304,192)f32 #16217=(1,2304,192)f32 nn.Linear layers_mmsa.2.residual_group.blocks.0.mlp.fc1 1 1 16217 16222 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #16217=(1,2304,192)f32 #16222=(1,2304,384)f32 nn.GELU layers_mmsa.2.residual_group.blocks.0.mlp.act 1 1 16222 16223 #16222=(1,2304,384)f32 #16223=(1,2304,384)f32 nn.Dropout layers_mmsa.2.residual_group.blocks.0.mlp.drop 1 1 16223 16224 #16223=(1,2304,384)f32 #16224=(1,2304,384)f32 nn.Linear layers_mmsa.2.residual_group.blocks.0.mlp.fc2 1 1 16224 16225 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #16224=(1,2304,384)f32 #16225=(1,2304,192)f32 nn.Dropout layers_mmsa.2.residual_group.blocks.0.mlp.drop 1 1 16225 16226 #16225=(1,2304,192)f32 #16226=(1,2304,192)f32 prim::Constant pnnx_17244 0 1 16227 value=None prim::Constant pnnx_17245 0 1 25441 value=1 aten::add pnnx_17246 3 1 input.387 16226 25441 16228 #input.387=(1,2304,192)f32 #16226=(1,2304,192)f32 #16228=(1,2304,192)f32 prim::Constant pnnx_17247 0 1 16229 value=trunc prim::Constant pnnx_17248 0 1 16230 value=8 prim::Constant pnnx_17249 0 1 16231 value=0 prim::Constant pnnx_17250 0 1 16232 value=2 prim::Constant pnnx_17251 0 1 16233 value=-4 prim::Constant pnnx_17252 0 1 16234 value=1 prim::Constant pnnx_17253 0 1 16235 value=3 prim::Constant pnnx_17254 0 1 16236 value=8 prim::Constant pnnx_17255 0 1 16237 value=4 prim::Constant pnnx_17256 0 1 16238 value=5 prim::Constant pnnx_17257 0 1 16239 value=-1 prim::Constant pnnx_17258 0 1 16240 value=64 pnnx.Attribute layers_mmsa.2.residual_group.blocks.1 0 1 attn_mask.87 @attn_mask=(36,64,64)f32 #attn_mask.87=(36,64,64)f32 aten::size pnnx_17259 2 1 16228 16231 16247 #16228=(1,2304,192)f32 prim::NumToTensor pnnx_17260 1 1 16247 B.207 aten::Int pnnx_17261 1 1 B.207 16249 aten::Int pnnx_17262 1 1 B.207 16250 aten::size pnnx_17263 2 1 16228 16232 16251 #16228=(1,2304,192)f32 prim::NumToTensor pnnx_17264 1 1 16251 C.351 aten::Int pnnx_17265 1 1 C.351 16253 aten::Int pnnx_17266 1 1 C.351 16254 aten::Int pnnx_17267 1 1 C.351 16255 aten::Int pnnx_17268 1 1 C.351 16256 nn.LayerNorm layers_mmsa.2.residual_group.blocks.1.norm1 1 1 16228 16257 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #16228=(1,2304,192)f32 #16257=(1,2304,192)f32 prim::ListConstruct pnnx_17269 4 1 16250 1687 1927 16256 16258 prim::Constant pnnx_17271 0 1 25442 value=-4 prim::ListConstruct pnnx_17272 2 1 16233 25442 16260 prim::Constant pnnx_17273 0 1 25443 value=2 prim::ListConstruct pnnx_17274 2 1 16234 25443 16261 Tensor.view Tensor.view_1924 2 1 16257 16258 x.173 $input=16257 $shape=16258 #16257=(1,2304,192)f32 #x.173=(1,48,48,192)f32 prim::Constant pnnx_17276 0 1 25444 value=0 torch.roll torch.roll_2504 3 1 x.173 16260 16261 x0.101 $input=x.173 $shifts=16260 $dims=16261 #x.173=(1,48,48,192)f32 #x0.101=(1,48,48,192)f32 aten::size pnnx_17277 2 1 x0.101 25444 16263 #x0.101=(1,48,48,192)f32 prim::NumToTensor pnnx_17278 1 1 16263 B0.101 aten::Int pnnx_17279 1 1 B0.101 16265 prim::Constant pnnx_17280 0 1 25445 value=1 aten::size pnnx_17281 2 1 x0.101 25445 16266 #x0.101=(1,48,48,192)f32 prim::NumToTensor pnnx_17282 1 1 16266 16267 
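layers_mmsa.2.residual_group.blocks.1 is a shifted block: torch.roll_2504 cyclically shifts the (1,48,48,192) map by (-4,-4) on dims (1,2) before the window partition, and the precomputed attn_mask.87 of shape (36,64,64) later suppresses attention between tokens that originate from different regions of the unshifted image. A sketch of how such a shift and mask are typically produced; the -100 fill value and the helper name are assumptions, not read from the dump:

import torch

def shift_and_mask(x, ws=8, shift=4):
    # x: (B, H, W, C). Cyclic shift as in torch.roll_2504 (shifts (-4, -4), dims (1, 2)),
    # plus the (nW, ws*ws, ws*ws) mask stored as the attn_mask attribute.
    b, h, w, c = x.shape
    shifted = torch.roll(x, shifts=(-shift, -shift), dims=(1, 2))
    # Label each pixel by its pre-shift region, then penalise cross-region pairs
    # that end up inside the same window.
    img = torch.zeros(1, h, w, 1)
    region = 0
    for h_sl in (slice(0, -ws), slice(-ws, -shift), slice(-shift, None)):
        for w_sl in (slice(0, -ws), slice(-ws, -shift), slice(-shift, None)):
            img[:, h_sl, w_sl, :] = region
            region += 1
    win = img.view(1, h // ws, ws, w // ws, ws, 1)
    win = win.permute(0, 1, 3, 2, 4, 5).reshape(-1, ws * ws)      # (36, 64)
    mask = win.unsqueeze(1) - win.unsqueeze(2)                    # (36, 64, 64)
    mask = mask.masked_fill(mask != 0, -100.0).masked_fill(mask == 0, 0.0)
    return shifted, mask

shifted, mask = shift_and_mask(torch.randn(1, 48, 48, 192))       # (1,48,48,192), (36,64,64)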
prim::Constant pnnx_17283 0 1 25446 value=2 aten::size pnnx_17284 2 1 x0.101 25446 16268 #x0.101=(1,48,48,192)f32 prim::NumToTensor pnnx_17285 1 1 16268 16269 aten::size pnnx_17286 2 1 x0.101 16235 16270 #x0.101=(1,48,48,192)f32 prim::NumToTensor pnnx_17287 1 1 16270 C0.101 aten::Int pnnx_17288 1 1 C0.101 16272 aten::Int pnnx_17289 1 1 C0.101 16273 aten::div pnnx_17290 3 1 16267 16230 16229 16274 aten::Int pnnx_17291 1 1 16274 16275 prim::Constant pnnx_17292 0 1 25447 value=8 prim::Constant pnnx_17293 0 1 25448 value=trunc aten::div pnnx_17294 3 1 16269 25447 25448 16276 aten::Int pnnx_17295 1 1 16276 16277 prim::Constant pnnx_17296 0 1 25449 value=8 prim::ListConstruct pnnx_17297 6 1 16265 16275 16236 16277 25449 16273 16278 prim::Constant pnnx_17299 0 1 25450 value=0 prim::Constant pnnx_17300 0 1 25451 value=1 prim::Constant pnnx_17301 0 1 25452 value=3 prim::Constant pnnx_17302 0 1 25453 value=2 prim::ListConstruct pnnx_17303 6 1 25450 25451 25452 25453 16237 16238 16280 Tensor.view Tensor.view_1925 2 1 x0.101 16278 x1.101 $input=x0.101 $shape=16278 #x0.101=(1,48,48,192)f32 #x1.101=(1,6,8,6,8,192)f32 prim::Constant pnnx_17307 0 1 25455 value=8 prim::Constant pnnx_17308 0 1 25456 value=8 prim::ListConstruct pnnx_17309 4 1 16239 25455 25456 16272 16283 torch.permute torch.permute_2868 2 1 x1.101 16280 16281 $input=x1.101 $dims=16280 #x1.101=(1,6,8,6,8,192)f32 #16281=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_257 1 1 16281 16282 memory_format=torch.contiguous_format $input=16281 #16281=(1,6,6,8,8,192)f32 #16282=(1,6,6,8,8,192)f32 prim::Constant pnnx_17311 0 1 25457 value=-1 prim::ListConstruct pnnx_17312 3 1 25457 16240 16255 16285 prim::Constant pnnx_17314 0 1 16287 value=1.767767e-01 prim::Constant pnnx_17315 0 1 16288 value=trunc prim::Constant pnnx_17316 0 1 16289 value=6 prim::Constant pnnx_17317 0 1 16290 value=0 prim::Constant pnnx_17318 0 1 16291 value=1 prim::Constant pnnx_17319 0 1 16292 value=2 prim::Constant pnnx_17320 0 1 16293 value=3 prim::Constant pnnx_17321 0 1 16294 value=6 prim::Constant pnnx_17322 0 1 16295 value=4 prim::Constant pnnx_17323 0 1 16296 value=-2 prim::Constant pnnx_17324 0 1 16297 value=-1 prim::Constant pnnx_17325 0 1 16298 value=64 pnnx.Attribute layers_mmsa.2.residual_group.blocks.1.attn 0 1 relative_position_bias_table.173 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.173=(225,6)f32 pnnx.Attribute layers_mmsa.2.residual_group.blocks.1.attn 0 1 relative_position_index.173 @relative_position_index=(64,64)i64 #relative_position_index.173=(64,64)i64 Tensor.view Tensor.view_1926 2 1 16282 16283 x_windows.173 $input=16282 $shape=16283 #16282=(1,6,6,8,8,192)f32 #x_windows.173=(36,8,8,192)f32 Tensor.view Tensor.view_1927 2 1 x_windows.173 16285 x2.101 $input=x_windows.173 $shape=16285 #x_windows.173=(36,8,8,192)f32 #x2.101=(36,64,192)f32 aten::size pnnx_17326 2 1 x2.101 16290 16306 #x2.101=(36,64,192)f32 prim::NumToTensor pnnx_17327 1 1 16306 B_.173 aten::Int pnnx_17328 1 1 B_.173 16308 aten::Int pnnx_17329 1 1 B_.173 16309 aten::size pnnx_17330 2 1 x2.101 16291 16310 #x2.101=(36,64,192)f32 prim::NumToTensor pnnx_17331 1 1 16310 N.173 aten::Int pnnx_17332 1 1 N.173 16312 aten::Int pnnx_17333 1 1 N.173 16313 aten::Int pnnx_17334 1 1 N.173 16314 aten::Int pnnx_17335 1 1 N.173 16315 aten::Int pnnx_17336 1 1 N.173 16316 aten::Int pnnx_17337 1 1 N.173 16317 aten::size pnnx_17338 2 1 x2.101 16292 16318 #x2.101=(36,64,192)f32 prim::NumToTensor pnnx_17339 1 1 16318 C.353 aten::Int pnnx_17340 1 1 C.353 16320 nn.Linear 
layers_mmsa.2.residual_group.blocks.1.attn.qkv 1 1 x2.101 16321 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.101=(36,64,192)f32 #16321=(36,64,576)f32 aten::div pnnx_17341 3 1 C.353 16289 16288 16322 aten::Int pnnx_17342 1 1 16322 16323 prim::ListConstruct pnnx_17343 5 1 16309 16317 16293 16294 16323 16324 prim::Constant pnnx_17345 0 1 25458 value=2 prim::Constant pnnx_17346 0 1 25459 value=0 prim::Constant pnnx_17347 0 1 25460 value=3 prim::Constant pnnx_17348 0 1 25461 value=1 prim::ListConstruct pnnx_17349 5 1 25458 25459 25460 25461 16295 16326 Tensor.reshape Tensor.reshape_604 2 1 16321 16324 16325 $input=16321 $shape=16324 #16321=(36,64,576)f32 #16325=(36,64,3,6,32)f32 prim::Constant pnnx_17351 0 1 25462 value=0 prim::Constant pnnx_17352 0 1 25463 value=0 prim::Constant pnnx_17354 0 1 25464 value=0 prim::Constant pnnx_17355 0 1 25465 value=1 prim::Constant pnnx_17357 0 1 25466 value=0 prim::Constant pnnx_17358 0 1 25467 value=2 torch.permute torch.permute_2869 2 1 16325 16326 qkv0.101 $input=16325 $dims=16326 #16325=(36,64,3,6,32)f32 #qkv0.101=(3,36,6,64,32)f32 Tensor.select Tensor.select_905 3 1 qkv0.101 25462 25463 q.173 $input=qkv0.101 $dim=25462 $index=25463 #qkv0.101=(3,36,6,64,32)f32 #q.173=(36,6,64,32)f32 aten::mul pnnx_17360 2 1 q.173 16287 q0.101 #q.173=(36,6,64,32)f32 #q0.101=(36,6,64,32)f32 Tensor.select Tensor.select_906 3 1 qkv0.101 25464 25465 k.173 $input=qkv0.101 $dim=25464 $index=25465 #qkv0.101=(3,36,6,64,32)f32 #k.173=(36,6,64,32)f32 prim::Constant pnnx_17363 0 1 25468 value=-1 prim::ListConstruct pnnx_17364 1 1 25468 16334 Tensor.view Tensor.view_1928 2 1 relative_position_index.173 16334 16335 $input=relative_position_index.173 $shape=16334 #relative_position_index.173=(64,64)i64 #16335=(4096)i64 prim::ListConstruct pnnx_17366 1 1 16335 16336 #16335=(4096)i64 prim::Constant pnnx_17368 0 1 25469 value=64 prim::Constant pnnx_17369 0 1 25470 value=-1 prim::ListConstruct pnnx_17370 3 1 16298 25469 25470 16338 Tensor.index Tensor.index_411 2 1 relative_position_bias_table.173 16336 16337 $input=relative_position_bias_table.173 $expr=16336 #relative_position_bias_table.173=(225,6)f32 #16337=(4096,6)f32 prim::Constant pnnx_17372 0 1 25471 value=2 prim::Constant pnnx_17373 0 1 25472 value=0 prim::Constant pnnx_17374 0 1 25473 value=1 prim::ListConstruct pnnx_17375 3 1 25471 25472 25473 16340 Tensor.view Tensor.view_1929 2 1 16337 16338 relative_position_bias.173 $input=16337 $shape=16338 #16337=(4096,6)f32 #relative_position_bias.173=(64,64,6)f32 prim::Constant pnnx_17379 0 1 25475 value=0 torch.permute torch.permute_2870 2 1 relative_position_bias.173 16340 16341 $input=relative_position_bias.173 $dims=16340 #relative_position_bias.173=(64,64,6)f32 #16341=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_258 1 1 16341 relative_position_bias0.101 memory_format=torch.contiguous_format $input=16341 #16341=(6,64,64)f32 #relative_position_bias0.101=(6,64,64)f32 prim::Constant pnnx_17381 0 1 25476 value=1 torch.transpose torch.transpose_3163 3 1 k.173 16296 16297 16332 $input=k.173 $dim0=16296 $dim1=16297 #k.173=(36,6,64,32)f32 #16332=(36,6,32,64)f32 torch.matmul torch.matmul_2374 2 1 q0.101 16332 attn.347 $input=q0.101 $other=16332 #q0.101=(36,6,64,32)f32 #16332=(36,6,32,64)f32 #attn.347=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3387 2 1 relative_position_bias0.101 25475 16343 $input=relative_position_bias0.101 $dim=25475 #relative_position_bias0.101=(6,64,64)f32 #16343=(1,6,64,64)f32 aten::add pnnx_17382 3 1 attn.347 16343 25476 
attn0.51 #attn.347=(36,6,64,64)f32 #16343=(1,6,64,64)f32 #attn0.51=(36,6,64,64)f32 prim::Constant pnnx_17383 0 1 25477 value=0 aten::size pnnx_17384 2 1 attn_mask.87 25477 16345 #attn_mask.87=(36,64,64)f32 prim::NumToTensor pnnx_17385 1 1 16345 other.87 aten::Int pnnx_17386 1 1 other.87 16347 prim::Constant pnnx_17387 0 1 25478 value=trunc aten::div pnnx_17388 3 1 B_.173 other.87 25478 16348 aten::Int pnnx_17389 1 1 16348 16349 prim::Constant pnnx_17390 0 1 25479 value=6 prim::ListConstruct pnnx_17391 5 1 16349 16347 25479 16316 16315 16350 prim::Constant pnnx_17393 0 1 25480 value=1 prim::Constant pnnx_17395 0 1 25481 value=0 prim::Constant pnnx_17397 0 1 25482 value=1 Tensor.view Tensor.view_1930 2 1 attn0.51 16350 16351 $input=attn0.51 $shape=16350 #attn0.51=(36,6,64,64)f32 #16351=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3388 2 1 attn_mask.87 25480 16352 $input=attn_mask.87 $dim=25480 #attn_mask.87=(36,64,64)f32 #16352=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3389 2 1 16352 25481 16353 $input=16352 $dim=25481 #16352=(36,1,64,64)f32 #16353=(1,36,1,64,64)f32 aten::add pnnx_17398 3 1 16351 16353 25482 attn1.51 #16351=(1,36,6,64,64)f32 #16353=(1,36,1,64,64)f32 #attn1.51=(1,36,6,64,64)f32 prim::Constant pnnx_17399 0 1 25483 value=-1 prim::Constant pnnx_17400 0 1 25484 value=6 prim::ListConstruct pnnx_17401 4 1 25483 25484 16314 16313 16355 Tensor.view Tensor.view_1931 2 1 attn1.51 16355 input.389 $input=attn1.51 $shape=16355 #attn1.51=(1,36,6,64,64)f32 #input.389=(36,6,64,64)f32 nn.Softmax layers_mmsa.2.residual_group.blocks.1.attn.softmax 1 1 input.389 16357 dim=-1 #input.389=(36,6,64,64)f32 #16357=(36,6,64,64)f32 nn.Dropout layers_mmsa.2.residual_group.blocks.1.attn.attn_drop 1 1 16357 16358 #16357=(36,6,64,64)f32 #16358=(36,6,64,64)f32 Tensor.select Tensor.select_907 3 1 qkv0.101 25466 25467 v.173 $input=qkv0.101 $dim=25466 $index=25467 #qkv0.101=(3,36,6,64,32)f32 #v.173=(36,6,64,32)f32 prim::Constant pnnx_17404 0 1 25485 value=1 prim::Constant pnnx_17405 0 1 25486 value=2 torch.matmul torch.matmul_2375 2 1 16358 v.173 16359 $input=16358 $other=v.173 #16358=(36,6,64,64)f32 #v.173=(36,6,64,32)f32 #16359=(36,6,64,32)f32 prim::ListConstruct pnnx_17407 3 1 16308 16312 16320 16361 torch.transpose torch.transpose_3164 3 1 16359 25485 25486 16360 $input=16359 $dim0=25485 $dim1=25486 #16359=(36,6,64,32)f32 #16360=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_605 2 1 16360 16361 input0.105 $input=16360 $shape=16361 #16360=(36,64,6,32)f32 #input0.105=(36,64,192)f32 nn.Linear layers_mmsa.2.residual_group.blocks.1.attn.proj 1 1 input0.105 16363 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.105=(36,64,192)f32 #16363=(36,64,192)f32 nn.Dropout layers_mmsa.2.residual_group.blocks.1.attn.proj_drop 1 1 16363 16364 #16363=(36,64,192)f32 #16364=(36,64,192)f32 prim::Constant pnnx_17409 0 1 25487 value=-1 prim::Constant pnnx_17410 0 1 25488 value=8 prim::Constant pnnx_17411 0 1 25489 value=8 prim::ListConstruct pnnx_17412 4 1 25487 25488 25489 16254 16365 prim::Constant pnnx_17414 0 1 25490 value=8 prim::Constant pnnx_17415 0 1 25491 value=trunc aten::div pnnx_17416 3 1 H1.1 25490 25491 16367 aten::Int pnnx_17417 1 1 16367 16368 prim::Constant pnnx_17418 0 1 25492 value=8 prim::Constant pnnx_17419 0 1 25493 value=trunc aten::div pnnx_17420 3 1 W1.1 25492 25493 16369 aten::Int pnnx_17421 1 1 16369 16370 prim::Constant pnnx_17422 0 1 25494 value=1 prim::Constant pnnx_17423 0 1 25495 value=8 prim::Constant pnnx_17424 0 1 25496 value=8 prim::Constant 
pnnx_17425 0 1 25497 value=-1 prim::ListConstruct pnnx_17426 6 1 25494 16368 16370 25495 25496 25497 16371 prim::Constant pnnx_17428 0 1 25498 value=0 prim::Constant pnnx_17429 0 1 25499 value=1 prim::Constant pnnx_17430 0 1 25500 value=3 prim::Constant pnnx_17431 0 1 25501 value=2 prim::Constant pnnx_17432 0 1 25502 value=4 prim::Constant pnnx_17433 0 1 25503 value=5 prim::ListConstruct pnnx_17434 6 1 25498 25499 25500 25501 25502 25503 16373 Tensor.view Tensor.view_1932 2 1 16364 16365 windows.173 $input=16364 $shape=16365 #16364=(36,64,192)f32 #windows.173=(36,8,8,192)f32 Tensor.view Tensor.view_1933 2 1 windows.173 16371 x3.101 $input=windows.173 $shape=16371 #windows.173=(36,8,8,192)f32 #x3.101=(1,6,6,8,8,192)f32 prim::Constant pnnx_17438 0 1 25505 value=1 prim::Constant pnnx_17439 0 1 25506 value=-1 prim::ListConstruct pnnx_17440 4 1 25505 1684 1924 25506 16376 torch.permute torch.permute_2871 2 1 x3.101 16373 16374 $input=x3.101 $dims=16373 #x3.101=(1,6,6,8,8,192)f32 #16374=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_259 1 1 16374 16375 memory_format=torch.contiguous_format $input=16374 #16374=(1,6,8,6,8,192)f32 #16375=(1,6,8,6,8,192)f32 prim::Constant pnnx_17442 0 1 25507 value=4 prim::Constant pnnx_17443 0 1 25508 value=4 prim::ListConstruct pnnx_17444 2 1 25507 25508 16378 prim::Constant pnnx_17445 0 1 25509 value=1 prim::Constant pnnx_17446 0 1 25510 value=2 prim::ListConstruct pnnx_17447 2 1 25509 25510 16379 Tensor.view Tensor.view_1934 2 1 16375 16376 shifted_x.87 $input=16375 $shape=16376 #16375=(1,6,8,6,8,192)f32 #shifted_x.87=(1,48,48,192)f32 aten::mul pnnx_17449 2 1 H1.1 W1.1 16381 aten::Int pnnx_17450 1 1 16381 16382 prim::ListConstruct pnnx_17451 3 1 16249 16382 16253 16383 prim::Constant pnnx_17453 0 1 16385 value=None prim::Constant pnnx_17454 0 1 25511 value=1 torch.roll torch.roll_2505 3 1 shifted_x.87 16378 16379 x4.101 $input=shifted_x.87 $shifts=16378 $dims=16379 #shifted_x.87=(1,48,48,192)f32 #x4.101=(1,48,48,192)f32 Tensor.view Tensor.view_1935 2 1 x4.101 16383 x5.87 $input=x4.101 $shape=16383 #x4.101=(1,48,48,192)f32 #x5.87=(1,2304,192)f32 aten::add pnnx_17455 3 1 16228 x5.87 25511 input.391 #16228=(1,2304,192)f32 #x5.87=(1,2304,192)f32 #input.391=(1,2304,192)f32 nn.LayerNorm layers_mmsa.2.residual_group.blocks.1.norm2 1 1 input.391 16387 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.391=(1,2304,192)f32 #16387=(1,2304,192)f32 nn.Linear layers_mmsa.2.residual_group.blocks.1.mlp.fc1 1 1 16387 16392 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #16387=(1,2304,192)f32 #16392=(1,2304,384)f32 nn.GELU layers_mmsa.2.residual_group.blocks.1.mlp.act 1 1 16392 16393 #16392=(1,2304,384)f32 #16393=(1,2304,384)f32 nn.Dropout layers_mmsa.2.residual_group.blocks.1.mlp.drop 1 1 16393 16394 #16393=(1,2304,384)f32 #16394=(1,2304,384)f32 nn.Linear layers_mmsa.2.residual_group.blocks.1.mlp.fc2 1 1 16394 16395 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #16394=(1,2304,384)f32 #16395=(1,2304,192)f32 nn.Dropout layers_mmsa.2.residual_group.blocks.1.mlp.drop 1 1 16395 16396 #16395=(1,2304,192)f32 #16396=(1,2304,192)f32 prim::Constant pnnx_17456 0 1 16397 value=None prim::Constant pnnx_17457 0 1 25512 value=1 aten::add pnnx_17458 3 1 input.391 16396 25512 16398 #input.391=(1,2304,192)f32 #16396=(1,2304,192)f32 #16398=(1,2304,192)f32 prim::Constant pnnx_17459 0 1 16399 value=trunc prim::Constant pnnx_17460 0 1 16400 value=8 prim::Constant pnnx_17461 0 1 
16401 value=0 prim::Constant pnnx_17462 0 1 16402 value=2 prim::Constant pnnx_17463 0 1 16403 value=1 prim::Constant pnnx_17464 0 1 16404 value=3 prim::Constant pnnx_17465 0 1 16405 value=8 prim::Constant pnnx_17466 0 1 16406 value=4 prim::Constant pnnx_17467 0 1 16407 value=5 prim::Constant pnnx_17468 0 1 16408 value=-1 prim::Constant pnnx_17469 0 1 16409 value=64 aten::size pnnx_17470 2 1 16398 16401 16415 #16398=(1,2304,192)f32 prim::NumToTensor pnnx_17471 1 1 16415 B.209 aten::Int pnnx_17472 1 1 B.209 16417 aten::Int pnnx_17473 1 1 B.209 16418 aten::size pnnx_17474 2 1 16398 16402 16419 #16398=(1,2304,192)f32 prim::NumToTensor pnnx_17475 1 1 16419 C.355 aten::Int pnnx_17476 1 1 C.355 16421 aten::Int pnnx_17477 1 1 C.355 16422 aten::Int pnnx_17478 1 1 C.355 16423 aten::Int pnnx_17479 1 1 C.355 16424 nn.LayerNorm layers_mmsa.2.residual_group.blocks.2.norm1 1 1 16398 16425 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #16398=(1,2304,192)f32 #16425=(1,2304,192)f32 prim::ListConstruct pnnx_17480 4 1 16418 1681 1921 16424 16426 prim::Constant pnnx_17482 0 1 25513 value=0 Tensor.view Tensor.view_1936 2 1 16425 16426 x.175 $input=16425 $shape=16426 #16425=(1,2304,192)f32 #x.175=(1,48,48,192)f32 aten::size pnnx_17483 2 1 x.175 25513 16428 #x.175=(1,48,48,192)f32 prim::NumToTensor pnnx_17484 1 1 16428 B0.103 aten::Int pnnx_17485 1 1 B0.103 16430 aten::size pnnx_17486 2 1 x.175 16403 16431 #x.175=(1,48,48,192)f32 prim::NumToTensor pnnx_17487 1 1 16431 16432 prim::Constant pnnx_17488 0 1 25514 value=2 aten::size pnnx_17489 2 1 x.175 25514 16433 #x.175=(1,48,48,192)f32 prim::NumToTensor pnnx_17490 1 1 16433 16434 aten::size pnnx_17491 2 1 x.175 16404 16435 #x.175=(1,48,48,192)f32 prim::NumToTensor pnnx_17492 1 1 16435 C0.103 aten::Int pnnx_17493 1 1 C0.103 16437 aten::Int pnnx_17494 1 1 C0.103 16438 aten::div pnnx_17495 3 1 16432 16400 16399 16439 aten::Int pnnx_17496 1 1 16439 16440 prim::Constant pnnx_17497 0 1 25515 value=8 prim::Constant pnnx_17498 0 1 25516 value=trunc aten::div pnnx_17499 3 1 16434 25515 25516 16441 aten::Int pnnx_17500 1 1 16441 16442 prim::Constant pnnx_17501 0 1 25517 value=8 prim::ListConstruct pnnx_17502 6 1 16430 16440 16405 16442 25517 16438 16443 prim::Constant pnnx_17504 0 1 25518 value=0 prim::Constant pnnx_17505 0 1 25519 value=1 prim::Constant pnnx_17506 0 1 25520 value=3 prim::Constant pnnx_17507 0 1 25521 value=2 prim::ListConstruct pnnx_17508 6 1 25518 25519 25520 25521 16406 16407 16445 Tensor.view Tensor.view_1937 2 1 x.175 16443 x0.103 $input=x.175 $shape=16443 #x.175=(1,48,48,192)f32 #x0.103=(1,6,8,6,8,192)f32 prim::Constant pnnx_17512 0 1 25523 value=8 prim::Constant pnnx_17513 0 1 25524 value=8 prim::ListConstruct pnnx_17514 4 1 16408 25523 25524 16437 16448 torch.permute torch.permute_2872 2 1 x0.103 16445 16446 $input=x0.103 $dims=16445 #x0.103=(1,6,8,6,8,192)f32 #16446=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_260 1 1 16446 16447 memory_format=torch.contiguous_format $input=16446 #16446=(1,6,6,8,8,192)f32 #16447=(1,6,6,8,8,192)f32 prim::Constant pnnx_17516 0 1 25525 value=-1 prim::ListConstruct pnnx_17517 3 1 25525 16409 16423 16450 prim::Constant pnnx_17519 0 1 16452 value=1.767767e-01 prim::Constant pnnx_17520 0 1 16453 value=trunc prim::Constant pnnx_17521 0 1 16454 value=6 prim::Constant pnnx_17522 0 1 16455 value=0 prim::Constant pnnx_17523 0 1 16456 value=1 prim::Constant pnnx_17524 0 1 16457 value=2 prim::Constant pnnx_17525 0 1 16458 value=3 prim::Constant pnnx_17526 0 1 16459 
value=6 prim::Constant pnnx_17527 0 1 16460 value=4 prim::Constant pnnx_17528 0 1 16461 value=-2 prim::Constant pnnx_17529 0 1 16462 value=-1 prim::Constant pnnx_17530 0 1 16463 value=64 pnnx.Attribute layers_mmsa.2.residual_group.blocks.2.attn 0 1 relative_position_bias_table.175 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.175=(225,6)f32 pnnx.Attribute layers_mmsa.2.residual_group.blocks.2.attn 0 1 relative_position_index.175 @relative_position_index=(64,64)i64 #relative_position_index.175=(64,64)i64 Tensor.view Tensor.view_1938 2 1 16447 16448 x_windows.175 $input=16447 $shape=16448 #16447=(1,6,6,8,8,192)f32 #x_windows.175=(36,8,8,192)f32 Tensor.view Tensor.view_1939 2 1 x_windows.175 16450 x1.103 $input=x_windows.175 $shape=16450 #x_windows.175=(36,8,8,192)f32 #x1.103=(36,64,192)f32 aten::size pnnx_17531 2 1 x1.103 16455 16471 #x1.103=(36,64,192)f32 prim::NumToTensor pnnx_17532 1 1 16471 B_.175 aten::Int pnnx_17533 1 1 B_.175 16473 aten::Int pnnx_17534 1 1 B_.175 16474 aten::size pnnx_17535 2 1 x1.103 16456 16475 #x1.103=(36,64,192)f32 prim::NumToTensor pnnx_17536 1 1 16475 N.175 aten::Int pnnx_17537 1 1 N.175 16477 aten::Int pnnx_17538 1 1 N.175 16478 aten::size pnnx_17539 2 1 x1.103 16457 16479 #x1.103=(36,64,192)f32 prim::NumToTensor pnnx_17540 1 1 16479 C.357 aten::Int pnnx_17541 1 1 C.357 16481 nn.Linear layers_mmsa.2.residual_group.blocks.2.attn.qkv 1 1 x1.103 16482 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.103=(36,64,192)f32 #16482=(36,64,576)f32 aten::div pnnx_17542 3 1 C.357 16454 16453 16483 aten::Int pnnx_17543 1 1 16483 16484 prim::ListConstruct pnnx_17544 5 1 16474 16478 16458 16459 16484 16485 prim::Constant pnnx_17546 0 1 25526 value=2 prim::Constant pnnx_17547 0 1 25527 value=0 prim::Constant pnnx_17548 0 1 25528 value=3 prim::Constant pnnx_17549 0 1 25529 value=1 prim::ListConstruct pnnx_17550 5 1 25526 25527 25528 25529 16460 16487 Tensor.reshape Tensor.reshape_606 2 1 16482 16485 16486 $input=16482 $shape=16485 #16482=(36,64,576)f32 #16486=(36,64,3,6,32)f32 prim::Constant pnnx_17552 0 1 25530 value=0 prim::Constant pnnx_17553 0 1 25531 value=0 prim::Constant pnnx_17555 0 1 25532 value=0 prim::Constant pnnx_17556 0 1 25533 value=1 prim::Constant pnnx_17558 0 1 25534 value=0 prim::Constant pnnx_17559 0 1 25535 value=2 torch.permute torch.permute_2873 2 1 16486 16487 qkv0.103 $input=16486 $dims=16487 #16486=(36,64,3,6,32)f32 #qkv0.103=(3,36,6,64,32)f32 Tensor.select Tensor.select_908 3 1 qkv0.103 25530 25531 q.175 $input=qkv0.103 $dim=25530 $index=25531 #qkv0.103=(3,36,6,64,32)f32 #q.175=(36,6,64,32)f32 aten::mul pnnx_17561 2 1 q.175 16452 q0.103 #q.175=(36,6,64,32)f32 #q0.103=(36,6,64,32)f32 Tensor.select Tensor.select_909 3 1 qkv0.103 25532 25533 k.175 $input=qkv0.103 $dim=25532 $index=25533 #qkv0.103=(3,36,6,64,32)f32 #k.175=(36,6,64,32)f32 prim::Constant pnnx_17564 0 1 25536 value=-1 prim::ListConstruct pnnx_17565 1 1 25536 16495 Tensor.view Tensor.view_1940 2 1 relative_position_index.175 16495 16496 $input=relative_position_index.175 $shape=16495 #relative_position_index.175=(64,64)i64 #16496=(4096)i64 prim::ListConstruct pnnx_17567 1 1 16496 16497 #16496=(4096)i64 prim::Constant pnnx_17569 0 1 25537 value=64 prim::Constant pnnx_17570 0 1 25538 value=-1 prim::ListConstruct pnnx_17571 3 1 16463 25537 25538 16499 Tensor.index Tensor.index_412 2 1 relative_position_bias_table.175 16497 16498 $input=relative_position_bias_table.175 $expr=16497 #relative_position_bias_table.175=(225,6)f32 
#16498=(4096,6)f32 prim::Constant pnnx_17573 0 1 25539 value=2 prim::Constant pnnx_17574 0 1 25540 value=0 prim::Constant pnnx_17575 0 1 25541 value=1 prim::ListConstruct pnnx_17576 3 1 25539 25540 25541 16501 Tensor.view Tensor.view_1941 2 1 16498 16499 relative_position_bias.175 $input=16498 $shape=16499 #16498=(4096,6)f32 #relative_position_bias.175=(64,64,6)f32 prim::Constant pnnx_17580 0 1 25543 value=0 torch.permute torch.permute_2874 2 1 relative_position_bias.175 16501 16502 $input=relative_position_bias.175 $dims=16501 #relative_position_bias.175=(64,64,6)f32 #16502=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_261 1 1 16502 relative_position_bias0.103 memory_format=torch.contiguous_format $input=16502 #16502=(6,64,64)f32 #relative_position_bias0.103=(6,64,64)f32 prim::Constant pnnx_17582 0 1 25544 value=1 torch.transpose torch.transpose_3165 3 1 k.175 16461 16462 16493 $input=k.175 $dim0=16461 $dim1=16462 #k.175=(36,6,64,32)f32 #16493=(36,6,32,64)f32 torch.matmul torch.matmul_2376 2 1 q0.103 16493 attn.351 $input=q0.103 $other=16493 #q0.103=(36,6,64,32)f32 #16493=(36,6,32,64)f32 #attn.351=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3390 2 1 relative_position_bias0.103 25543 16504 $input=relative_position_bias0.103 $dim=25543 #relative_position_bias0.103=(6,64,64)f32 #16504=(1,6,64,64)f32 aten::add pnnx_17583 3 1 attn.351 16504 25544 input.393 #attn.351=(36,6,64,64)f32 #16504=(1,6,64,64)f32 #input.393=(36,6,64,64)f32 nn.Softmax layers_mmsa.2.residual_group.blocks.2.attn.softmax 1 1 input.393 16506 dim=-1 #input.393=(36,6,64,64)f32 #16506=(36,6,64,64)f32 nn.Dropout layers_mmsa.2.residual_group.blocks.2.attn.attn_drop 1 1 16506 16507 #16506=(36,6,64,64)f32 #16507=(36,6,64,64)f32 Tensor.select Tensor.select_910 3 1 qkv0.103 25534 25535 v.175 $input=qkv0.103 $dim=25534 $index=25535 #qkv0.103=(3,36,6,64,32)f32 #v.175=(36,6,64,32)f32 prim::Constant pnnx_17585 0 1 25545 value=1 prim::Constant pnnx_17586 0 1 25546 value=2 torch.matmul torch.matmul_2377 2 1 16507 v.175 16508 $input=16507 $other=v.175 #16507=(36,6,64,64)f32 #v.175=(36,6,64,32)f32 #16508=(36,6,64,32)f32 prim::ListConstruct pnnx_17588 3 1 16473 16477 16481 16510 torch.transpose torch.transpose_3166 3 1 16508 25545 25546 16509 $input=16508 $dim0=25545 $dim1=25546 #16508=(36,6,64,32)f32 #16509=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_607 2 1 16509 16510 input0.107 $input=16509 $shape=16510 #16509=(36,64,6,32)f32 #input0.107=(36,64,192)f32 nn.Linear layers_mmsa.2.residual_group.blocks.2.attn.proj 1 1 input0.107 16512 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.107=(36,64,192)f32 #16512=(36,64,192)f32 nn.Dropout layers_mmsa.2.residual_group.blocks.2.attn.proj_drop 1 1 16512 16513 #16512=(36,64,192)f32 #16513=(36,64,192)f32 prim::Constant pnnx_17590 0 1 25547 value=-1 prim::Constant pnnx_17591 0 1 25548 value=8 prim::Constant pnnx_17592 0 1 25549 value=8 prim::ListConstruct pnnx_17593 4 1 25547 25548 25549 16422 16514 prim::Constant pnnx_17595 0 1 25550 value=8 prim::Constant pnnx_17596 0 1 25551 value=trunc aten::div pnnx_17597 3 1 H1.1 25550 25551 16516 aten::Int pnnx_17598 1 1 16516 16517 prim::Constant pnnx_17599 0 1 25552 value=8 prim::Constant pnnx_17600 0 1 25553 value=trunc aten::div pnnx_17601 3 1 W1.1 25552 25553 16518 aten::Int pnnx_17602 1 1 16518 16519 prim::Constant pnnx_17603 0 1 25554 value=1 prim::Constant pnnx_17604 0 1 25555 value=8 prim::Constant pnnx_17605 0 1 25556 value=8 prim::Constant pnnx_17606 0 1 25557 value=-1 prim::ListConstruct pnnx_17607 6 1 
25554 16517 16519 25555 25556 25557 16520 prim::Constant pnnx_17609 0 1 25558 value=0 prim::Constant pnnx_17610 0 1 25559 value=1 prim::Constant pnnx_17611 0 1 25560 value=3 prim::Constant pnnx_17612 0 1 25561 value=2 prim::Constant pnnx_17613 0 1 25562 value=4 prim::Constant pnnx_17614 0 1 25563 value=5 prim::ListConstruct pnnx_17615 6 1 25558 25559 25560 25561 25562 25563 16522 Tensor.view Tensor.view_1942 2 1 16513 16514 windows.175 $input=16513 $shape=16514 #16513=(36,64,192)f32 #windows.175=(36,8,8,192)f32 Tensor.view Tensor.view_1943 2 1 windows.175 16520 x2.103 $input=windows.175 $shape=16520 #windows.175=(36,8,8,192)f32 #x2.103=(1,6,6,8,8,192)f32 prim::Constant pnnx_17619 0 1 25565 value=1 prim::Constant pnnx_17620 0 1 25566 value=-1 prim::ListConstruct pnnx_17621 4 1 25565 1678 1918 25566 16525 torch.permute torch.permute_2875 2 1 x2.103 16522 16523 $input=x2.103 $dims=16522 #x2.103=(1,6,6,8,8,192)f32 #16523=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_262 1 1 16523 16524 memory_format=torch.contiguous_format $input=16523 #16523=(1,6,8,6,8,192)f32 #16524=(1,6,8,6,8,192)f32 aten::mul pnnx_17623 2 1 H1.1 W1.1 16527 aten::Int pnnx_17624 1 1 16527 16528 prim::ListConstruct pnnx_17625 3 1 16417 16528 16421 16529 prim::Constant pnnx_17627 0 1 16531 value=None prim::Constant pnnx_17628 0 1 25567 value=1 Tensor.view Tensor.view_1944 2 1 16524 16525 x3.103 $input=16524 $shape=16525 #16524=(1,6,8,6,8,192)f32 #x3.103=(1,48,48,192)f32 Tensor.view Tensor.view_1945 2 1 x3.103 16529 x4.103 $input=x3.103 $shape=16529 #x3.103=(1,48,48,192)f32 #x4.103=(1,2304,192)f32 aten::add pnnx_17629 3 1 16398 x4.103 25567 input.395 #16398=(1,2304,192)f32 #x4.103=(1,2304,192)f32 #input.395=(1,2304,192)f32 nn.LayerNorm layers_mmsa.2.residual_group.blocks.2.norm2 1 1 input.395 16533 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.395=(1,2304,192)f32 #16533=(1,2304,192)f32 nn.Linear layers_mmsa.2.residual_group.blocks.2.mlp.fc1 1 1 16533 16538 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #16533=(1,2304,192)f32 #16538=(1,2304,384)f32 nn.GELU layers_mmsa.2.residual_group.blocks.2.mlp.act 1 1 16538 16539 #16538=(1,2304,384)f32 #16539=(1,2304,384)f32 nn.Dropout layers_mmsa.2.residual_group.blocks.2.mlp.drop 1 1 16539 16540 #16539=(1,2304,384)f32 #16540=(1,2304,384)f32 nn.Linear layers_mmsa.2.residual_group.blocks.2.mlp.fc2 1 1 16540 16541 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #16540=(1,2304,384)f32 #16541=(1,2304,192)f32 nn.Dropout layers_mmsa.2.residual_group.blocks.2.mlp.drop 1 1 16541 16542 #16541=(1,2304,192)f32 #16542=(1,2304,192)f32 prim::Constant pnnx_17630 0 1 16543 value=None prim::Constant pnnx_17631 0 1 25568 value=1 aten::add pnnx_17632 3 1 input.395 16542 25568 16544 #input.395=(1,2304,192)f32 #16542=(1,2304,192)f32 #16544=(1,2304,192)f32 prim::Constant pnnx_17633 0 1 16545 value=trunc prim::Constant pnnx_17634 0 1 16546 value=8 prim::Constant pnnx_17635 0 1 16547 value=0 prim::Constant pnnx_17636 0 1 16548 value=2 prim::Constant pnnx_17637 0 1 16549 value=-4 prim::Constant pnnx_17638 0 1 16550 value=1 prim::Constant pnnx_17639 0 1 16551 value=3 prim::Constant pnnx_17640 0 1 16552 value=8 prim::Constant pnnx_17641 0 1 16553 value=4 prim::Constant pnnx_17642 0 1 16554 value=5 prim::Constant pnnx_17643 0 1 16555 value=-1 prim::Constant pnnx_17644 0 1 16556 value=64 pnnx.Attribute layers_mmsa.2.residual_group.blocks.3 0 1 attn_mask.89 @attn_mask=(36,64,64)f32 
#attn_mask.89=(36,64,64)f32 aten::size pnnx_17645 2 1 16544 16547 16563 #16544=(1,2304,192)f32 prim::NumToTensor pnnx_17646 1 1 16563 B.211 aten::Int pnnx_17647 1 1 B.211 16565 aten::Int pnnx_17648 1 1 B.211 16566 aten::size pnnx_17649 2 1 16544 16548 16567 #16544=(1,2304,192)f32 prim::NumToTensor pnnx_17650 1 1 16567 C.359 aten::Int pnnx_17651 1 1 C.359 16569 aten::Int pnnx_17652 1 1 C.359 16570 aten::Int pnnx_17653 1 1 C.359 16571 aten::Int pnnx_17654 1 1 C.359 16572 nn.LayerNorm layers_mmsa.2.residual_group.blocks.3.norm1 1 1 16544 16573 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #16544=(1,2304,192)f32 #16573=(1,2304,192)f32 prim::ListConstruct pnnx_17655 4 1 16566 1675 1915 16572 16574 prim::Constant pnnx_17657 0 1 25569 value=-4 prim::ListConstruct pnnx_17658 2 1 16549 25569 16576 prim::Constant pnnx_17659 0 1 25570 value=2 prim::ListConstruct pnnx_17660 2 1 16550 25570 16577 Tensor.view Tensor.view_1946 2 1 16573 16574 x.177 $input=16573 $shape=16574 #16573=(1,2304,192)f32 #x.177=(1,48,48,192)f32 prim::Constant pnnx_17662 0 1 25571 value=0 torch.roll torch.roll_2506 3 1 x.177 16576 16577 x0.105 $input=x.177 $shifts=16576 $dims=16577 #x.177=(1,48,48,192)f32 #x0.105=(1,48,48,192)f32 aten::size pnnx_17663 2 1 x0.105 25571 16579 #x0.105=(1,48,48,192)f32 prim::NumToTensor pnnx_17664 1 1 16579 B0.105 aten::Int pnnx_17665 1 1 B0.105 16581 prim::Constant pnnx_17666 0 1 25572 value=1 aten::size pnnx_17667 2 1 x0.105 25572 16582 #x0.105=(1,48,48,192)f32 prim::NumToTensor pnnx_17668 1 1 16582 16583 prim::Constant pnnx_17669 0 1 25573 value=2 aten::size pnnx_17670 2 1 x0.105 25573 16584 #x0.105=(1,48,48,192)f32 prim::NumToTensor pnnx_17671 1 1 16584 16585 aten::size pnnx_17672 2 1 x0.105 16551 16586 #x0.105=(1,48,48,192)f32 prim::NumToTensor pnnx_17673 1 1 16586 C0.105 aten::Int pnnx_17674 1 1 C0.105 16588 aten::Int pnnx_17675 1 1 C0.105 16589 aten::div pnnx_17676 3 1 16583 16546 16545 16590 aten::Int pnnx_17677 1 1 16590 16591 prim::Constant pnnx_17678 0 1 25574 value=8 prim::Constant pnnx_17679 0 1 25575 value=trunc aten::div pnnx_17680 3 1 16585 25574 25575 16592 aten::Int pnnx_17681 1 1 16592 16593 prim::Constant pnnx_17682 0 1 25576 value=8 prim::ListConstruct pnnx_17683 6 1 16581 16591 16552 16593 25576 16589 16594 prim::Constant pnnx_17685 0 1 25577 value=0 prim::Constant pnnx_17686 0 1 25578 value=1 prim::Constant pnnx_17687 0 1 25579 value=3 prim::Constant pnnx_17688 0 1 25580 value=2 prim::ListConstruct pnnx_17689 6 1 25577 25578 25579 25580 16553 16554 16596 Tensor.view Tensor.view_1947 2 1 x0.105 16594 x1.105 $input=x0.105 $shape=16594 #x0.105=(1,48,48,192)f32 #x1.105=(1,6,8,6,8,192)f32 prim::Constant pnnx_17693 0 1 25582 value=8 prim::Constant pnnx_17694 0 1 25583 value=8 prim::ListConstruct pnnx_17695 4 1 16555 25582 25583 16588 16599 torch.permute torch.permute_2876 2 1 x1.105 16596 16597 $input=x1.105 $dims=16596 #x1.105=(1,6,8,6,8,192)f32 #16597=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_263 1 1 16597 16598 memory_format=torch.contiguous_format $input=16597 #16597=(1,6,6,8,8,192)f32 #16598=(1,6,6,8,8,192)f32 prim::Constant pnnx_17697 0 1 25584 value=-1 prim::ListConstruct pnnx_17698 3 1 25584 16556 16571 16601 prim::Constant pnnx_17700 0 1 16603 value=1.767767e-01 prim::Constant pnnx_17701 0 1 16604 value=trunc prim::Constant pnnx_17702 0 1 16605 value=6 prim::Constant pnnx_17703 0 1 16606 value=0 prim::Constant pnnx_17704 0 1 16607 value=1 prim::Constant pnnx_17705 0 1 16608 value=2 prim::Constant pnnx_17706 0 1 
16609 value=3 prim::Constant pnnx_17707 0 1 16610 value=6 prim::Constant pnnx_17708 0 1 16611 value=4 prim::Constant pnnx_17709 0 1 16612 value=-2 prim::Constant pnnx_17710 0 1 16613 value=-1 prim::Constant pnnx_17711 0 1 16614 value=64 pnnx.Attribute layers_mmsa.2.residual_group.blocks.3.attn 0 1 relative_position_bias_table.177 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.177=(225,6)f32 pnnx.Attribute layers_mmsa.2.residual_group.blocks.3.attn 0 1 relative_position_index.177 @relative_position_index=(64,64)i64 #relative_position_index.177=(64,64)i64 Tensor.view Tensor.view_1948 2 1 16598 16599 x_windows.177 $input=16598 $shape=16599 #16598=(1,6,6,8,8,192)f32 #x_windows.177=(36,8,8,192)f32 Tensor.view Tensor.view_1949 2 1 x_windows.177 16601 x2.105 $input=x_windows.177 $shape=16601 #x_windows.177=(36,8,8,192)f32 #x2.105=(36,64,192)f32 aten::size pnnx_17712 2 1 x2.105 16606 16622 #x2.105=(36,64,192)f32 prim::NumToTensor pnnx_17713 1 1 16622 B_.177 aten::Int pnnx_17714 1 1 B_.177 16624 aten::Int pnnx_17715 1 1 B_.177 16625 aten::size pnnx_17716 2 1 x2.105 16607 16626 #x2.105=(36,64,192)f32 prim::NumToTensor pnnx_17717 1 1 16626 N.177 aten::Int pnnx_17718 1 1 N.177 16628 aten::Int pnnx_17719 1 1 N.177 16629 aten::Int pnnx_17720 1 1 N.177 16630 aten::Int pnnx_17721 1 1 N.177 16631 aten::Int pnnx_17722 1 1 N.177 16632 aten::Int pnnx_17723 1 1 N.177 16633 aten::size pnnx_17724 2 1 x2.105 16608 16634 #x2.105=(36,64,192)f32 prim::NumToTensor pnnx_17725 1 1 16634 C.361 aten::Int pnnx_17726 1 1 C.361 16636 nn.Linear layers_mmsa.2.residual_group.blocks.3.attn.qkv 1 1 x2.105 16637 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.105=(36,64,192)f32 #16637=(36,64,576)f32 aten::div pnnx_17727 3 1 C.361 16605 16604 16638 aten::Int pnnx_17728 1 1 16638 16639 prim::ListConstruct pnnx_17729 5 1 16625 16633 16609 16610 16639 16640 prim::Constant pnnx_17731 0 1 25585 value=2 prim::Constant pnnx_17732 0 1 25586 value=0 prim::Constant pnnx_17733 0 1 25587 value=3 prim::Constant pnnx_17734 0 1 25588 value=1 prim::ListConstruct pnnx_17735 5 1 25585 25586 25587 25588 16611 16642 Tensor.reshape Tensor.reshape_608 2 1 16637 16640 16641 $input=16637 $shape=16640 #16637=(36,64,576)f32 #16641=(36,64,3,6,32)f32 prim::Constant pnnx_17737 0 1 25589 value=0 prim::Constant pnnx_17738 0 1 25590 value=0 prim::Constant pnnx_17740 0 1 25591 value=0 prim::Constant pnnx_17741 0 1 25592 value=1 prim::Constant pnnx_17743 0 1 25593 value=0 prim::Constant pnnx_17744 0 1 25594 value=2 torch.permute torch.permute_2877 2 1 16641 16642 qkv0.105 $input=16641 $dims=16642 #16641=(36,64,3,6,32)f32 #qkv0.105=(3,36,6,64,32)f32 Tensor.select Tensor.select_911 3 1 qkv0.105 25589 25590 q.177 $input=qkv0.105 $dim=25589 $index=25590 #qkv0.105=(3,36,6,64,32)f32 #q.177=(36,6,64,32)f32 aten::mul pnnx_17746 2 1 q.177 16603 q0.105 #q.177=(36,6,64,32)f32 #q0.105=(36,6,64,32)f32 Tensor.select Tensor.select_912 3 1 qkv0.105 25591 25592 k.177 $input=qkv0.105 $dim=25591 $index=25592 #qkv0.105=(3,36,6,64,32)f32 #k.177=(36,6,64,32)f32 prim::Constant pnnx_17749 0 1 25595 value=-1 prim::ListConstruct pnnx_17750 1 1 25595 16650 Tensor.view Tensor.view_1950 2 1 relative_position_index.177 16650 16651 $input=relative_position_index.177 $shape=16650 #relative_position_index.177=(64,64)i64 #16651=(4096)i64 prim::ListConstruct pnnx_17752 1 1 16651 16652 #16651=(4096)i64 prim::Constant pnnx_17754 0 1 25596 value=64 prim::Constant pnnx_17755 0 1 25597 value=-1 prim::ListConstruct pnnx_17756 3 1 16614 25596 
25597 16654 Tensor.index Tensor.index_413 2 1 relative_position_bias_table.177 16652 16653 $input=relative_position_bias_table.177 $expr=16652 #relative_position_bias_table.177=(225,6)f32 #16653=(4096,6)f32 prim::Constant pnnx_17758 0 1 25598 value=2 prim::Constant pnnx_17759 0 1 25599 value=0 prim::Constant pnnx_17760 0 1 25600 value=1 prim::ListConstruct pnnx_17761 3 1 25598 25599 25600 16656 Tensor.view Tensor.view_1951 2 1 16653 16654 relative_position_bias.177 $input=16653 $shape=16654 #16653=(4096,6)f32 #relative_position_bias.177=(64,64,6)f32 prim::Constant pnnx_17765 0 1 25602 value=0 torch.permute torch.permute_2878 2 1 relative_position_bias.177 16656 16657 $input=relative_position_bias.177 $dims=16656 #relative_position_bias.177=(64,64,6)f32 #16657=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_264 1 1 16657 relative_position_bias0.105 memory_format=torch.contiguous_format $input=16657 #16657=(6,64,64)f32 #relative_position_bias0.105=(6,64,64)f32 prim::Constant pnnx_17767 0 1 25603 value=1 torch.transpose torch.transpose_3167 3 1 k.177 16612 16613 16648 $input=k.177 $dim0=16612 $dim1=16613 #k.177=(36,6,64,32)f32 #16648=(36,6,32,64)f32 torch.matmul torch.matmul_2378 2 1 q0.105 16648 attn.355 $input=q0.105 $other=16648 #q0.105=(36,6,64,32)f32 #16648=(36,6,32,64)f32 #attn.355=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3391 2 1 relative_position_bias0.105 25602 16659 $input=relative_position_bias0.105 $dim=25602 #relative_position_bias0.105=(6,64,64)f32 #16659=(1,6,64,64)f32 aten::add pnnx_17768 3 1 attn.355 16659 25603 attn0.53 #attn.355=(36,6,64,64)f32 #16659=(1,6,64,64)f32 #attn0.53=(36,6,64,64)f32 prim::Constant pnnx_17769 0 1 25604 value=0 aten::size pnnx_17770 2 1 attn_mask.89 25604 16661 #attn_mask.89=(36,64,64)f32 prim::NumToTensor pnnx_17771 1 1 16661 other.89 aten::Int pnnx_17772 1 1 other.89 16663 prim::Constant pnnx_17773 0 1 25605 value=trunc aten::div pnnx_17774 3 1 B_.177 other.89 25605 16664 aten::Int pnnx_17775 1 1 16664 16665 prim::Constant pnnx_17776 0 1 25606 value=6 prim::ListConstruct pnnx_17777 5 1 16665 16663 25606 16632 16631 16666 prim::Constant pnnx_17779 0 1 25607 value=1 prim::Constant pnnx_17781 0 1 25608 value=0 prim::Constant pnnx_17783 0 1 25609 value=1 Tensor.view Tensor.view_1952 2 1 attn0.53 16666 16667 $input=attn0.53 $shape=16666 #attn0.53=(36,6,64,64)f32 #16667=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3392 2 1 attn_mask.89 25607 16668 $input=attn_mask.89 $dim=25607 #attn_mask.89=(36,64,64)f32 #16668=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3393 2 1 16668 25608 16669 $input=16668 $dim=25608 #16668=(36,1,64,64)f32 #16669=(1,36,1,64,64)f32 aten::add pnnx_17784 3 1 16667 16669 25609 attn1.53 #16667=(1,36,6,64,64)f32 #16669=(1,36,1,64,64)f32 #attn1.53=(1,36,6,64,64)f32 prim::Constant pnnx_17785 0 1 25610 value=-1 prim::Constant pnnx_17786 0 1 25611 value=6 prim::ListConstruct pnnx_17787 4 1 25610 25611 16630 16629 16671 Tensor.view Tensor.view_1953 2 1 attn1.53 16671 input.397 $input=attn1.53 $shape=16671 #attn1.53=(1,36,6,64,64)f32 #input.397=(36,6,64,64)f32 nn.Softmax layers_mmsa.2.residual_group.blocks.3.attn.softmax 1 1 input.397 16673 dim=-1 #input.397=(36,6,64,64)f32 #16673=(36,6,64,64)f32 nn.Dropout layers_mmsa.2.residual_group.blocks.3.attn.attn_drop 1 1 16673 16674 #16673=(36,6,64,64)f32 #16674=(36,6,64,64)f32 Tensor.select Tensor.select_913 3 1 qkv0.105 25593 25594 v.177 $input=qkv0.105 $dim=25593 $index=25594 #qkv0.105=(3,36,6,64,32)f32 #v.177=(36,6,64,32)f32 prim::Constant pnnx_17790 0 1 25612 value=1 
prim::Constant pnnx_17791 0 1 25613 value=2 torch.matmul torch.matmul_2379 2 1 16674 v.177 16675 $input=16674 $other=v.177 #16674=(36,6,64,64)f32 #v.177=(36,6,64,32)f32 #16675=(36,6,64,32)f32 prim::ListConstruct pnnx_17793 3 1 16624 16628 16636 16677 torch.transpose torch.transpose_3168 3 1 16675 25612 25613 16676 $input=16675 $dim0=25612 $dim1=25613 #16675=(36,6,64,32)f32 #16676=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_609 2 1 16676 16677 input0.109 $input=16676 $shape=16677 #16676=(36,64,6,32)f32 #input0.109=(36,64,192)f32 nn.Linear layers_mmsa.2.residual_group.blocks.3.attn.proj 1 1 input0.109 16679 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.109=(36,64,192)f32 #16679=(36,64,192)f32 nn.Dropout layers_mmsa.2.residual_group.blocks.3.attn.proj_drop 1 1 16679 16680 #16679=(36,64,192)f32 #16680=(36,64,192)f32 prim::Constant pnnx_17795 0 1 25614 value=-1 prim::Constant pnnx_17796 0 1 25615 value=8 prim::Constant pnnx_17797 0 1 25616 value=8 prim::ListConstruct pnnx_17798 4 1 25614 25615 25616 16570 16681 prim::Constant pnnx_17800 0 1 25617 value=8 prim::Constant pnnx_17801 0 1 25618 value=trunc aten::div pnnx_17802 3 1 H1.1 25617 25618 16683 aten::Int pnnx_17803 1 1 16683 16684 prim::Constant pnnx_17804 0 1 25619 value=8 prim::Constant pnnx_17805 0 1 25620 value=trunc aten::div pnnx_17806 3 1 W1.1 25619 25620 16685 aten::Int pnnx_17807 1 1 16685 16686 prim::Constant pnnx_17808 0 1 25621 value=1 prim::Constant pnnx_17809 0 1 25622 value=8 prim::Constant pnnx_17810 0 1 25623 value=8 prim::Constant pnnx_17811 0 1 25624 value=-1 prim::ListConstruct pnnx_17812 6 1 25621 16684 16686 25622 25623 25624 16687 prim::Constant pnnx_17814 0 1 25625 value=0 prim::Constant pnnx_17815 0 1 25626 value=1 prim::Constant pnnx_17816 0 1 25627 value=3 prim::Constant pnnx_17817 0 1 25628 value=2 prim::Constant pnnx_17818 0 1 25629 value=4 prim::Constant pnnx_17819 0 1 25630 value=5 prim::ListConstruct pnnx_17820 6 1 25625 25626 25627 25628 25629 25630 16689 Tensor.view Tensor.view_1954 2 1 16680 16681 windows.177 $input=16680 $shape=16681 #16680=(36,64,192)f32 #windows.177=(36,8,8,192)f32 Tensor.view Tensor.view_1955 2 1 windows.177 16687 x3.105 $input=windows.177 $shape=16687 #windows.177=(36,8,8,192)f32 #x3.105=(1,6,6,8,8,192)f32 prim::Constant pnnx_17824 0 1 25632 value=1 prim::Constant pnnx_17825 0 1 25633 value=-1 prim::ListConstruct pnnx_17826 4 1 25632 1672 1912 25633 16692 torch.permute torch.permute_2879 2 1 x3.105 16689 16690 $input=x3.105 $dims=16689 #x3.105=(1,6,6,8,8,192)f32 #16690=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_265 1 1 16690 16691 memory_format=torch.contiguous_format $input=16690 #16690=(1,6,8,6,8,192)f32 #16691=(1,6,8,6,8,192)f32 prim::Constant pnnx_17828 0 1 25634 value=4 prim::Constant pnnx_17829 0 1 25635 value=4 prim::ListConstruct pnnx_17830 2 1 25634 25635 16694 prim::Constant pnnx_17831 0 1 25636 value=1 prim::Constant pnnx_17832 0 1 25637 value=2 prim::ListConstruct pnnx_17833 2 1 25636 25637 16695 Tensor.view Tensor.view_1956 2 1 16691 16692 shifted_x.89 $input=16691 $shape=16692 #16691=(1,6,8,6,8,192)f32 #shifted_x.89=(1,48,48,192)f32 aten::mul pnnx_17835 2 1 H1.1 W1.1 16697 aten::Int pnnx_17836 1 1 16697 16698 prim::ListConstruct pnnx_17837 3 1 16565 16698 16569 16699 prim::Constant pnnx_17839 0 1 16701 value=None prim::Constant pnnx_17840 0 1 25638 value=1 torch.roll torch.roll_2507 3 1 shifted_x.89 16694 16695 x4.105 $input=shifted_x.89 $shifts=16694 $dims=16695 #shifted_x.89=(1,48,48,192)f32 
#x4.105=(1,48,48,192)f32 Tensor.view Tensor.view_1957 2 1 x4.105 16699 x5.89 $input=x4.105 $shape=16699 #x4.105=(1,48,48,192)f32 #x5.89=(1,2304,192)f32 aten::add pnnx_17841 3 1 16544 x5.89 25638 input.399 #16544=(1,2304,192)f32 #x5.89=(1,2304,192)f32 #input.399=(1,2304,192)f32 nn.LayerNorm layers_mmsa.2.residual_group.blocks.3.norm2 1 1 input.399 16703 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.399=(1,2304,192)f32 #16703=(1,2304,192)f32 nn.Linear layers_mmsa.2.residual_group.blocks.3.mlp.fc1 1 1 16703 16708 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #16703=(1,2304,192)f32 #16708=(1,2304,384)f32 nn.GELU layers_mmsa.2.residual_group.blocks.3.mlp.act 1 1 16708 16709 #16708=(1,2304,384)f32 #16709=(1,2304,384)f32 nn.Dropout layers_mmsa.2.residual_group.blocks.3.mlp.drop 1 1 16709 16710 #16709=(1,2304,384)f32 #16710=(1,2304,384)f32 nn.Linear layers_mmsa.2.residual_group.blocks.3.mlp.fc2 1 1 16710 16711 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #16710=(1,2304,384)f32 #16711=(1,2304,192)f32 nn.Dropout layers_mmsa.2.residual_group.blocks.3.mlp.drop 1 1 16711 16712 #16711=(1,2304,192)f32 #16712=(1,2304,192)f32 prim::Constant pnnx_17842 0 1 16713 value=None prim::Constant pnnx_17843 0 1 25639 value=1 aten::add pnnx_17844 3 1 input.399 16712 25639 16714 #input.399=(1,2304,192)f32 #16712=(1,2304,192)f32 #16714=(1,2304,192)f32 prim::Constant pnnx_17845 0 1 16715 value=trunc prim::Constant pnnx_17846 0 1 16716 value=8 prim::Constant pnnx_17847 0 1 16717 value=0 prim::Constant pnnx_17848 0 1 16718 value=2 prim::Constant pnnx_17849 0 1 16719 value=1 prim::Constant pnnx_17850 0 1 16720 value=3 prim::Constant pnnx_17851 0 1 16721 value=8 prim::Constant pnnx_17852 0 1 16722 value=4 prim::Constant pnnx_17853 0 1 16723 value=5 prim::Constant pnnx_17854 0 1 16724 value=-1 prim::Constant pnnx_17855 0 1 16725 value=64 aten::size pnnx_17856 2 1 16714 16717 16731 #16714=(1,2304,192)f32 prim::NumToTensor pnnx_17857 1 1 16731 B.213 aten::Int pnnx_17858 1 1 B.213 16733 aten::Int pnnx_17859 1 1 B.213 16734 aten::size pnnx_17860 2 1 16714 16718 16735 #16714=(1,2304,192)f32 prim::NumToTensor pnnx_17861 1 1 16735 C.363 aten::Int pnnx_17862 1 1 C.363 16737 aten::Int pnnx_17863 1 1 C.363 16738 aten::Int pnnx_17864 1 1 C.363 16739 aten::Int pnnx_17865 1 1 C.363 16740 nn.LayerNorm layers_mmsa.2.residual_group.blocks.4.norm1 1 1 16714 16741 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #16714=(1,2304,192)f32 #16741=(1,2304,192)f32 prim::ListConstruct pnnx_17866 4 1 16734 1669 1909 16740 16742 prim::Constant pnnx_17868 0 1 25640 value=0 Tensor.view Tensor.view_1958 2 1 16741 16742 x.179 $input=16741 $shape=16742 #16741=(1,2304,192)f32 #x.179=(1,48,48,192)f32 aten::size pnnx_17869 2 1 x.179 25640 16744 #x.179=(1,48,48,192)f32 prim::NumToTensor pnnx_17870 1 1 16744 B0.107 aten::Int pnnx_17871 1 1 B0.107 16746 aten::size pnnx_17872 2 1 x.179 16719 16747 #x.179=(1,48,48,192)f32 prim::NumToTensor pnnx_17873 1 1 16747 16748 prim::Constant pnnx_17874 0 1 25641 value=2 aten::size pnnx_17875 2 1 x.179 25641 16749 #x.179=(1,48,48,192)f32 prim::NumToTensor pnnx_17876 1 1 16749 16750 aten::size pnnx_17877 2 1 x.179 16720 16751 #x.179=(1,48,48,192)f32 prim::NumToTensor pnnx_17878 1 1 16751 C0.107 aten::Int pnnx_17879 1 1 C0.107 16753 aten::Int pnnx_17880 1 1 C0.107 16754 aten::div pnnx_17881 3 1 16748 16716 16715 16755 aten::Int pnnx_17882 1 1 16755 16756 
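Earlier in the line above, blocks.3 is closed out: Tensor.view_1954/Tensor.view_1955, torch.permute_2879 and Tensor.view_1956 undo the window partition (36,64,192) back to (1,48,48,192), torch.roll_2507 rolls by (+4,+4) to undo the earlier (-4,-4) shift, Tensor.view_1957 flattens to (1,2304,192), and the two aten::add ops apply the attention residual and the LayerNorm+MLP residual. A hedged sketch of that tail is below; window_reverse and block_tail are illustrative helper names under the usual Swin conventions, not identifiers from the graph.

import torch

def window_reverse(windows, window_size, H, W):
    # (num_windows*B, ws, ws, C) -> (B, H, W, C), inverse of the window partition
    B = windows.shape[0] // ((H // window_size) * (W // window_size))
    x = windows.view(B, H // window_size, W // window_size, window_size, window_size, -1)
    return x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, H, W, -1)

def block_tail(shortcut, attn_windows, norm2, mlp, H=48, W=48, window_size=8, shift=4):
    # attn_windows: (nW*B, 64, C) output of attention + proj; norm2: nn.LayerNorm(C);
    # mlp: the fc1 -> GELU -> dropout -> fc2 -> dropout stack seen in the dump
    C = attn_windows.shape[-1]
    x = window_reverse(attn_windows.view(-1, window_size, window_size, C), window_size, H, W)
    x = torch.roll(x, shifts=(shift, shift), dims=(1, 2))     # undo the (-4, -4) shift
    x = shortcut + x.view(shortcut.shape[0], H * W, C)        # first residual
    return x + mlp(norm2(x))                                  # second residual over the MLP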
prim::Constant pnnx_17883 0 1 25642 value=8 prim::Constant pnnx_17884 0 1 25643 value=trunc aten::div pnnx_17885 3 1 16750 25642 25643 16757 aten::Int pnnx_17886 1 1 16757 16758 prim::Constant pnnx_17887 0 1 25644 value=8 prim::ListConstruct pnnx_17888 6 1 16746 16756 16721 16758 25644 16754 16759 prim::Constant pnnx_17890 0 1 25645 value=0 prim::Constant pnnx_17891 0 1 25646 value=1 prim::Constant pnnx_17892 0 1 25647 value=3 prim::Constant pnnx_17893 0 1 25648 value=2 prim::ListConstruct pnnx_17894 6 1 25645 25646 25647 25648 16722 16723 16761 Tensor.view Tensor.view_1959 2 1 x.179 16759 x0.107 $input=x.179 $shape=16759 #x.179=(1,48,48,192)f32 #x0.107=(1,6,8,6,8,192)f32 prim::Constant pnnx_17898 0 1 25650 value=8 prim::Constant pnnx_17899 0 1 25651 value=8 prim::ListConstruct pnnx_17900 4 1 16724 25650 25651 16753 16764 torch.permute torch.permute_2880 2 1 x0.107 16761 16762 $input=x0.107 $dims=16761 #x0.107=(1,6,8,6,8,192)f32 #16762=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_266 1 1 16762 16763 memory_format=torch.contiguous_format $input=16762 #16762=(1,6,6,8,8,192)f32 #16763=(1,6,6,8,8,192)f32 prim::Constant pnnx_17902 0 1 25652 value=-1 prim::ListConstruct pnnx_17903 3 1 25652 16725 16739 16766 prim::Constant pnnx_17905 0 1 16768 value=1.767767e-01 prim::Constant pnnx_17906 0 1 16769 value=trunc prim::Constant pnnx_17907 0 1 16770 value=6 prim::Constant pnnx_17908 0 1 16771 value=0 prim::Constant pnnx_17909 0 1 16772 value=1 prim::Constant pnnx_17910 0 1 16773 value=2 prim::Constant pnnx_17911 0 1 16774 value=3 prim::Constant pnnx_17912 0 1 16775 value=6 prim::Constant pnnx_17913 0 1 16776 value=4 prim::Constant pnnx_17914 0 1 16777 value=-2 prim::Constant pnnx_17915 0 1 16778 value=-1 prim::Constant pnnx_17916 0 1 16779 value=64 pnnx.Attribute layers_mmsa.2.residual_group.blocks.4.attn 0 1 relative_position_bias_table.179 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.179=(225,6)f32 pnnx.Attribute layers_mmsa.2.residual_group.blocks.4.attn 0 1 relative_position_index.179 @relative_position_index=(64,64)i64 #relative_position_index.179=(64,64)i64 Tensor.view Tensor.view_1960 2 1 16763 16764 x_windows.179 $input=16763 $shape=16764 #16763=(1,6,6,8,8,192)f32 #x_windows.179=(36,8,8,192)f32 Tensor.view Tensor.view_1961 2 1 x_windows.179 16766 x1.107 $input=x_windows.179 $shape=16766 #x_windows.179=(36,8,8,192)f32 #x1.107=(36,64,192)f32 aten::size pnnx_17917 2 1 x1.107 16771 16787 #x1.107=(36,64,192)f32 prim::NumToTensor pnnx_17918 1 1 16787 B_.179 aten::Int pnnx_17919 1 1 B_.179 16789 aten::Int pnnx_17920 1 1 B_.179 16790 aten::size pnnx_17921 2 1 x1.107 16772 16791 #x1.107=(36,64,192)f32 prim::NumToTensor pnnx_17922 1 1 16791 N.179 aten::Int pnnx_17923 1 1 N.179 16793 aten::Int pnnx_17924 1 1 N.179 16794 aten::size pnnx_17925 2 1 x1.107 16773 16795 #x1.107=(36,64,192)f32 prim::NumToTensor pnnx_17926 1 1 16795 C.365 aten::Int pnnx_17927 1 1 C.365 16797 nn.Linear layers_mmsa.2.residual_group.blocks.4.attn.qkv 1 1 x1.107 16798 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.107=(36,64,192)f32 #16798=(36,64,576)f32 aten::div pnnx_17928 3 1 C.365 16770 16769 16799 aten::Int pnnx_17929 1 1 16799 16800 prim::ListConstruct pnnx_17930 5 1 16790 16794 16774 16775 16800 16801 prim::Constant pnnx_17932 0 1 25653 value=2 prim::Constant pnnx_17933 0 1 25654 value=0 prim::Constant pnnx_17934 0 1 25655 value=3 prim::Constant pnnx_17935 0 1 25656 value=1 prim::ListConstruct pnnx_17936 5 1 25653 25654 25655 25656 16776 16803 
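Tensor.view_1959, torch.permute_2880, Tensor.contiguous_266, Tensor.view_1960 and Tensor.view_1961 above are the traced window partition for blocks.4, a non-shifted W-MSA block (no torch.roll precedes it): the (1,48,48,192) feature map is cut into 6x6 windows of 8x8 tokens and flattened to (36,64,192) before the qkv projection. A minimal sketch, assuming the usual Swin-style helper name:

import torch

def window_partition(x, window_size=8):
    # x: (B, H, W, C) -> (num_windows*B, window_size*window_size, C)
    B, H, W, C = x.shape
    x = x.view(B, H // window_size, window_size, W // window_size, window_size, C)
    windows = x.permute(0, 1, 3, 2, 4, 5).contiguous()         # (B, H/ws, W/ws, ws, ws, C)
    return windows.view(-1, window_size * window_size, C)

x = torch.randn(1, 48, 48, 192)
print(window_partition(x).shape)   # torch.Size([36, 64, 192]), matching x1.107 in the dump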
Tensor.reshape Tensor.reshape_610 2 1 16798 16801 16802 $input=16798 $shape=16801 #16798=(36,64,576)f32 #16802=(36,64,3,6,32)f32 prim::Constant pnnx_17938 0 1 25657 value=0 prim::Constant pnnx_17939 0 1 25658 value=0 prim::Constant pnnx_17941 0 1 25659 value=0 prim::Constant pnnx_17942 0 1 25660 value=1 prim::Constant pnnx_17944 0 1 25661 value=0 prim::Constant pnnx_17945 0 1 25662 value=2 torch.permute torch.permute_2881 2 1 16802 16803 qkv0.107 $input=16802 $dims=16803 #16802=(36,64,3,6,32)f32 #qkv0.107=(3,36,6,64,32)f32 Tensor.select Tensor.select_914 3 1 qkv0.107 25657 25658 q.179 $input=qkv0.107 $dim=25657 $index=25658 #qkv0.107=(3,36,6,64,32)f32 #q.179=(36,6,64,32)f32 aten::mul pnnx_17947 2 1 q.179 16768 q0.107 #q.179=(36,6,64,32)f32 #q0.107=(36,6,64,32)f32 Tensor.select Tensor.select_915 3 1 qkv0.107 25659 25660 k.179 $input=qkv0.107 $dim=25659 $index=25660 #qkv0.107=(3,36,6,64,32)f32 #k.179=(36,6,64,32)f32 prim::Constant pnnx_17950 0 1 25663 value=-1 prim::ListConstruct pnnx_17951 1 1 25663 16811 Tensor.view Tensor.view_1962 2 1 relative_position_index.179 16811 16812 $input=relative_position_index.179 $shape=16811 #relative_position_index.179=(64,64)i64 #16812=(4096)i64 prim::ListConstruct pnnx_17953 1 1 16812 16813 #16812=(4096)i64 prim::Constant pnnx_17955 0 1 25664 value=64 prim::Constant pnnx_17956 0 1 25665 value=-1 prim::ListConstruct pnnx_17957 3 1 16779 25664 25665 16815 Tensor.index Tensor.index_414 2 1 relative_position_bias_table.179 16813 16814 $input=relative_position_bias_table.179 $expr=16813 #relative_position_bias_table.179=(225,6)f32 #16814=(4096,6)f32 prim::Constant pnnx_17959 0 1 25666 value=2 prim::Constant pnnx_17960 0 1 25667 value=0 prim::Constant pnnx_17961 0 1 25668 value=1 prim::ListConstruct pnnx_17962 3 1 25666 25667 25668 16817 Tensor.view Tensor.view_1963 2 1 16814 16815 relative_position_bias.179 $input=16814 $shape=16815 #16814=(4096,6)f32 #relative_position_bias.179=(64,64,6)f32 prim::Constant pnnx_17966 0 1 25670 value=0 torch.permute torch.permute_2882 2 1 relative_position_bias.179 16817 16818 $input=relative_position_bias.179 $dims=16817 #relative_position_bias.179=(64,64,6)f32 #16818=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_267 1 1 16818 relative_position_bias0.107 memory_format=torch.contiguous_format $input=16818 #16818=(6,64,64)f32 #relative_position_bias0.107=(6,64,64)f32 prim::Constant pnnx_17968 0 1 25671 value=1 torch.transpose torch.transpose_3169 3 1 k.179 16777 16778 16809 $input=k.179 $dim0=16777 $dim1=16778 #k.179=(36,6,64,32)f32 #16809=(36,6,32,64)f32 torch.matmul torch.matmul_2380 2 1 q0.107 16809 attn.359 $input=q0.107 $other=16809 #q0.107=(36,6,64,32)f32 #16809=(36,6,32,64)f32 #attn.359=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3394 2 1 relative_position_bias0.107 25670 16820 $input=relative_position_bias0.107 $dim=25670 #relative_position_bias0.107=(6,64,64)f32 #16820=(1,6,64,64)f32 aten::add pnnx_17969 3 1 attn.359 16820 25671 input.401 #attn.359=(36,6,64,64)f32 #16820=(1,6,64,64)f32 #input.401=(36,6,64,64)f32 nn.Softmax layers_mmsa.2.residual_group.blocks.4.attn.softmax 1 1 input.401 16822 dim=-1 #input.401=(36,6,64,64)f32 #16822=(36,6,64,64)f32 nn.Dropout layers_mmsa.2.residual_group.blocks.4.attn.attn_drop 1 1 16822 16823 #16822=(36,6,64,64)f32 #16823=(36,6,64,64)f32 Tensor.select Tensor.select_916 3 1 qkv0.107 25661 25662 v.179 $input=qkv0.107 $dim=25661 $index=25662 #qkv0.107=(3,36,6,64,32)f32 #v.179=(36,6,64,32)f32 prim::Constant pnnx_17971 0 1 25672 value=1 prim::Constant pnnx_17972 0 1 25673 value=2 
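The reshape/permute/select chain above (Tensor.reshape_610 through Tensor.select_916) is the standard multi-head qkv split: the 576-wide projection is viewed as (B_, N, 3, heads, head_dim) = (36,64,3,6,32), permuted to (3,36,6,64,32) and sliced into q, k and v, with q scaled by 1.767767e-01 = 32**-0.5 before q @ k.transpose(-2,-1). A hedged sketch under those shapes (the relative-position-bias and mask additions shown earlier sit between the matmul and the softmax in the actual graph):

import torch
import torch.nn as nn

B_, N, heads, head_dim = 36, 64, 6, 32
qkv_proj = nn.Linear(192, 576, bias=True)        # mirrors ...blocks.4.attn.qkv (illustrative weights)
x = torch.randn(B_, N, 192)

qkv = qkv_proj(x).reshape(B_, N, 3, heads, head_dim).permute(2, 0, 3, 1, 4)   # (3, 36, 6, 64, 32)
q, k, v = qkv[0], qkv[1], qkv[2]
attn = (q * head_dim ** -0.5) @ k.transpose(-2, -1)     # (36, 6, 64, 64); scale = 0.1767767
attn = attn.softmax(dim=-1)                             # bias/mask add omitted in this sketch
out = (attn @ v).transpose(1, 2).reshape(B_, N, heads * head_dim)   # (36, 64, 192), then attn.proj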
torch.matmul torch.matmul_2381 2 1 16823 v.179 16824 $input=16823 $other=v.179 #16823=(36,6,64,64)f32 #v.179=(36,6,64,32)f32 #16824=(36,6,64,32)f32 prim::ListConstruct pnnx_17974 3 1 16789 16793 16797 16826 torch.transpose torch.transpose_3170 3 1 16824 25672 25673 16825 $input=16824 $dim0=25672 $dim1=25673 #16824=(36,6,64,32)f32 #16825=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_611 2 1 16825 16826 input0.111 $input=16825 $shape=16826 #16825=(36,64,6,32)f32 #input0.111=(36,64,192)f32 nn.Linear layers_mmsa.2.residual_group.blocks.4.attn.proj 1 1 input0.111 16828 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.111=(36,64,192)f32 #16828=(36,64,192)f32 nn.Dropout layers_mmsa.2.residual_group.blocks.4.attn.proj_drop 1 1 16828 16829 #16828=(36,64,192)f32 #16829=(36,64,192)f32 prim::Constant pnnx_17976 0 1 25674 value=-1 prim::Constant pnnx_17977 0 1 25675 value=8 prim::Constant pnnx_17978 0 1 25676 value=8 prim::ListConstruct pnnx_17979 4 1 25674 25675 25676 16738 16830 prim::Constant pnnx_17981 0 1 25677 value=8 prim::Constant pnnx_17982 0 1 25678 value=trunc aten::div pnnx_17983 3 1 H1.1 25677 25678 16832 aten::Int pnnx_17984 1 1 16832 16833 prim::Constant pnnx_17985 0 1 25679 value=8 prim::Constant pnnx_17986 0 1 25680 value=trunc aten::div pnnx_17987 3 1 W1.1 25679 25680 16834 aten::Int pnnx_17988 1 1 16834 16835 prim::Constant pnnx_17989 0 1 25681 value=1 prim::Constant pnnx_17990 0 1 25682 value=8 prim::Constant pnnx_17991 0 1 25683 value=8 prim::Constant pnnx_17992 0 1 25684 value=-1 prim::ListConstruct pnnx_17993 6 1 25681 16833 16835 25682 25683 25684 16836 prim::Constant pnnx_17995 0 1 25685 value=0 prim::Constant pnnx_17996 0 1 25686 value=1 prim::Constant pnnx_17997 0 1 25687 value=3 prim::Constant pnnx_17998 0 1 25688 value=2 prim::Constant pnnx_17999 0 1 25689 value=4 prim::Constant pnnx_18000 0 1 25690 value=5 prim::ListConstruct pnnx_18001 6 1 25685 25686 25687 25688 25689 25690 16838 Tensor.view Tensor.view_1964 2 1 16829 16830 windows.179 $input=16829 $shape=16830 #16829=(36,64,192)f32 #windows.179=(36,8,8,192)f32 Tensor.view Tensor.view_1965 2 1 windows.179 16836 x2.107 $input=windows.179 $shape=16836 #windows.179=(36,8,8,192)f32 #x2.107=(1,6,6,8,8,192)f32 prim::Constant pnnx_18005 0 1 25692 value=1 prim::Constant pnnx_18006 0 1 25693 value=-1 prim::ListConstruct pnnx_18007 4 1 25692 1666 1906 25693 16841 torch.permute torch.permute_2883 2 1 x2.107 16838 16839 $input=x2.107 $dims=16838 #x2.107=(1,6,6,8,8,192)f32 #16839=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_268 1 1 16839 16840 memory_format=torch.contiguous_format $input=16839 #16839=(1,6,8,6,8,192)f32 #16840=(1,6,8,6,8,192)f32 aten::mul pnnx_18009 2 1 H1.1 W1.1 16843 aten::Int pnnx_18010 1 1 16843 16844 prim::ListConstruct pnnx_18011 3 1 16733 16844 16737 16845 prim::Constant pnnx_18013 0 1 16847 value=None prim::Constant pnnx_18014 0 1 25694 value=1 Tensor.view Tensor.view_1966 2 1 16840 16841 x3.107 $input=16840 $shape=16841 #16840=(1,6,8,6,8,192)f32 #x3.107=(1,48,48,192)f32 Tensor.view Tensor.view_1967 2 1 x3.107 16845 x4.107 $input=x3.107 $shape=16845 #x3.107=(1,48,48,192)f32 #x4.107=(1,2304,192)f32 aten::add pnnx_18015 3 1 16714 x4.107 25694 input.403 #16714=(1,2304,192)f32 #x4.107=(1,2304,192)f32 #input.403=(1,2304,192)f32 nn.LayerNorm layers_mmsa.2.residual_group.blocks.4.norm2 1 1 input.403 16849 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.403=(1,2304,192)f32 #16849=(1,2304,192)f32 nn.Linear 
layers_mmsa.2.residual_group.blocks.4.mlp.fc1 1 1 16849 16854 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #16849=(1,2304,192)f32 #16854=(1,2304,384)f32 nn.GELU layers_mmsa.2.residual_group.blocks.4.mlp.act 1 1 16854 16855 #16854=(1,2304,384)f32 #16855=(1,2304,384)f32 nn.Dropout layers_mmsa.2.residual_group.blocks.4.mlp.drop 1 1 16855 16856 #16855=(1,2304,384)f32 #16856=(1,2304,384)f32 nn.Linear layers_mmsa.2.residual_group.blocks.4.mlp.fc2 1 1 16856 16857 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #16856=(1,2304,384)f32 #16857=(1,2304,192)f32 nn.Dropout layers_mmsa.2.residual_group.blocks.4.mlp.drop 1 1 16857 16858 #16857=(1,2304,192)f32 #16858=(1,2304,192)f32 prim::Constant pnnx_18016 0 1 16859 value=None prim::Constant pnnx_18017 0 1 25695 value=1 aten::add pnnx_18018 3 1 input.403 16858 25695 16860 #input.403=(1,2304,192)f32 #16858=(1,2304,192)f32 #16860=(1,2304,192)f32 prim::Constant pnnx_18019 0 1 16861 value=trunc prim::Constant pnnx_18020 0 1 16862 value=8 prim::Constant pnnx_18021 0 1 16863 value=0 prim::Constant pnnx_18022 0 1 16864 value=2 prim::Constant pnnx_18023 0 1 16865 value=-4 prim::Constant pnnx_18024 0 1 16866 value=1 prim::Constant pnnx_18025 0 1 16867 value=3 prim::Constant pnnx_18026 0 1 16868 value=8 prim::Constant pnnx_18027 0 1 16869 value=4 prim::Constant pnnx_18028 0 1 16870 value=5 prim::Constant pnnx_18029 0 1 16871 value=-1 prim::Constant pnnx_18030 0 1 16872 value=64 pnnx.Attribute layers_mmsa.2.residual_group.blocks.5 0 1 attn_mask.91 @attn_mask=(36,64,64)f32 #attn_mask.91=(36,64,64)f32 aten::size pnnx_18031 2 1 16860 16863 16879 #16860=(1,2304,192)f32 prim::NumToTensor pnnx_18032 1 1 16879 B.215 aten::Int pnnx_18033 1 1 B.215 16881 aten::Int pnnx_18034 1 1 B.215 16882 aten::size pnnx_18035 2 1 16860 16864 16883 #16860=(1,2304,192)f32 prim::NumToTensor pnnx_18036 1 1 16883 C.367 aten::Int pnnx_18037 1 1 C.367 16885 aten::Int pnnx_18038 1 1 C.367 16886 aten::Int pnnx_18039 1 1 C.367 16887 aten::Int pnnx_18040 1 1 C.367 16888 nn.LayerNorm layers_mmsa.2.residual_group.blocks.5.norm1 1 1 16860 16889 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #16860=(1,2304,192)f32 #16889=(1,2304,192)f32 prim::ListConstruct pnnx_18041 4 1 16882 1663 1903 16888 16890 prim::Constant pnnx_18043 0 1 25696 value=-4 prim::ListConstruct pnnx_18044 2 1 16865 25696 16892 prim::Constant pnnx_18045 0 1 25697 value=2 prim::ListConstruct pnnx_18046 2 1 16866 25697 16893 Tensor.view Tensor.view_1968 2 1 16889 16890 x.181 $input=16889 $shape=16890 #16889=(1,2304,192)f32 #x.181=(1,48,48,192)f32 prim::Constant pnnx_18048 0 1 25698 value=0 torch.roll torch.roll_2508 3 1 x.181 16892 16893 x0.109 $input=x.181 $shifts=16892 $dims=16893 #x.181=(1,48,48,192)f32 #x0.109=(1,48,48,192)f32 aten::size pnnx_18049 2 1 x0.109 25698 16895 #x0.109=(1,48,48,192)f32 prim::NumToTensor pnnx_18050 1 1 16895 B0.109 aten::Int pnnx_18051 1 1 B0.109 16897 prim::Constant pnnx_18052 0 1 25699 value=1 aten::size pnnx_18053 2 1 x0.109 25699 16898 #x0.109=(1,48,48,192)f32 prim::NumToTensor pnnx_18054 1 1 16898 16899 prim::Constant pnnx_18055 0 1 25700 value=2 aten::size pnnx_18056 2 1 x0.109 25700 16900 #x0.109=(1,48,48,192)f32 prim::NumToTensor pnnx_18057 1 1 16900 16901 aten::size pnnx_18058 2 1 x0.109 16867 16902 #x0.109=(1,48,48,192)f32 prim::NumToTensor pnnx_18059 1 1 16902 C0.109 aten::Int pnnx_18060 1 1 C0.109 16904 aten::Int pnnx_18061 1 1 C0.109 16905 aten::div pnnx_18062 3 1 16899 16862 16861 
16906 aten::Int pnnx_18063 1 1 16906 16907 prim::Constant pnnx_18064 0 1 25701 value=8 prim::Constant pnnx_18065 0 1 25702 value=trunc aten::div pnnx_18066 3 1 16901 25701 25702 16908 aten::Int pnnx_18067 1 1 16908 16909 prim::Constant pnnx_18068 0 1 25703 value=8 prim::ListConstruct pnnx_18069 6 1 16897 16907 16868 16909 25703 16905 16910 prim::Constant pnnx_18071 0 1 25704 value=0 prim::Constant pnnx_18072 0 1 25705 value=1 prim::Constant pnnx_18073 0 1 25706 value=3 prim::Constant pnnx_18074 0 1 25707 value=2 prim::ListConstruct pnnx_18075 6 1 25704 25705 25706 25707 16869 16870 16912 Tensor.view Tensor.view_1969 2 1 x0.109 16910 x1.109 $input=x0.109 $shape=16910 #x0.109=(1,48,48,192)f32 #x1.109=(1,6,8,6,8,192)f32 prim::Constant pnnx_18079 0 1 25709 value=8 prim::Constant pnnx_18080 0 1 25710 value=8 prim::ListConstruct pnnx_18081 4 1 16871 25709 25710 16904 16915 torch.permute torch.permute_2884 2 1 x1.109 16912 16913 $input=x1.109 $dims=16912 #x1.109=(1,6,8,6,8,192)f32 #16913=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_269 1 1 16913 16914 memory_format=torch.contiguous_format $input=16913 #16913=(1,6,6,8,8,192)f32 #16914=(1,6,6,8,8,192)f32 prim::Constant pnnx_18083 0 1 25711 value=-1 prim::ListConstruct pnnx_18084 3 1 25711 16872 16887 16917 prim::Constant pnnx_18086 0 1 16919 value=1.767767e-01 prim::Constant pnnx_18087 0 1 16920 value=trunc prim::Constant pnnx_18088 0 1 16921 value=6 prim::Constant pnnx_18089 0 1 16922 value=0 prim::Constant pnnx_18090 0 1 16923 value=1 prim::Constant pnnx_18091 0 1 16924 value=2 prim::Constant pnnx_18092 0 1 16925 value=3 prim::Constant pnnx_18093 0 1 16926 value=6 prim::Constant pnnx_18094 0 1 16927 value=4 prim::Constant pnnx_18095 0 1 16928 value=-2 prim::Constant pnnx_18096 0 1 16929 value=-1 prim::Constant pnnx_18097 0 1 16930 value=64 pnnx.Attribute layers_mmsa.2.residual_group.blocks.5.attn 0 1 relative_position_bias_table.181 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.181=(225,6)f32 pnnx.Attribute layers_mmsa.2.residual_group.blocks.5.attn 0 1 relative_position_index.181 @relative_position_index=(64,64)i64 #relative_position_index.181=(64,64)i64 Tensor.view Tensor.view_1970 2 1 16914 16915 x_windows.181 $input=16914 $shape=16915 #16914=(1,6,6,8,8,192)f32 #x_windows.181=(36,8,8,192)f32 Tensor.view Tensor.view_1971 2 1 x_windows.181 16917 x2.109 $input=x_windows.181 $shape=16917 #x_windows.181=(36,8,8,192)f32 #x2.109=(36,64,192)f32 aten::size pnnx_18098 2 1 x2.109 16922 16938 #x2.109=(36,64,192)f32 prim::NumToTensor pnnx_18099 1 1 16938 B_.181 aten::Int pnnx_18100 1 1 B_.181 16940 aten::Int pnnx_18101 1 1 B_.181 16941 aten::size pnnx_18102 2 1 x2.109 16923 16942 #x2.109=(36,64,192)f32 prim::NumToTensor pnnx_18103 1 1 16942 N.181 aten::Int pnnx_18104 1 1 N.181 16944 aten::Int pnnx_18105 1 1 N.181 16945 aten::Int pnnx_18106 1 1 N.181 16946 aten::Int pnnx_18107 1 1 N.181 16947 aten::Int pnnx_18108 1 1 N.181 16948 aten::Int pnnx_18109 1 1 N.181 16949 aten::size pnnx_18110 2 1 x2.109 16924 16950 #x2.109=(36,64,192)f32 prim::NumToTensor pnnx_18111 1 1 16950 C.369 aten::Int pnnx_18112 1 1 C.369 16952 nn.Linear layers_mmsa.2.residual_group.blocks.5.attn.qkv 1 1 x2.109 16953 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.109=(36,64,192)f32 #16953=(36,64,576)f32 aten::div pnnx_18113 3 1 C.369 16921 16920 16954 aten::Int pnnx_18114 1 1 16954 16955 prim::ListConstruct pnnx_18115 5 1 16941 16949 16925 16926 16955 16956 prim::Constant pnnx_18117 0 1 25712 value=2 prim::Constant 
pnnx_18118 0 1 25713 value=0 prim::Constant pnnx_18119 0 1 25714 value=3 prim::Constant pnnx_18120 0 1 25715 value=1 prim::ListConstruct pnnx_18121 5 1 25712 25713 25714 25715 16927 16958 Tensor.reshape Tensor.reshape_612 2 1 16953 16956 16957 $input=16953 $shape=16956 #16953=(36,64,576)f32 #16957=(36,64,3,6,32)f32 prim::Constant pnnx_18123 0 1 25716 value=0 prim::Constant pnnx_18124 0 1 25717 value=0 prim::Constant pnnx_18126 0 1 25718 value=0 prim::Constant pnnx_18127 0 1 25719 value=1 prim::Constant pnnx_18129 0 1 25720 value=0 prim::Constant pnnx_18130 0 1 25721 value=2 torch.permute torch.permute_2885 2 1 16957 16958 qkv0.109 $input=16957 $dims=16958 #16957=(36,64,3,6,32)f32 #qkv0.109=(3,36,6,64,32)f32 Tensor.select Tensor.select_917 3 1 qkv0.109 25716 25717 q.181 $input=qkv0.109 $dim=25716 $index=25717 #qkv0.109=(3,36,6,64,32)f32 #q.181=(36,6,64,32)f32 aten::mul pnnx_18132 2 1 q.181 16919 q0.109 #q.181=(36,6,64,32)f32 #q0.109=(36,6,64,32)f32 Tensor.select Tensor.select_918 3 1 qkv0.109 25718 25719 k.181 $input=qkv0.109 $dim=25718 $index=25719 #qkv0.109=(3,36,6,64,32)f32 #k.181=(36,6,64,32)f32 prim::Constant pnnx_18135 0 1 25722 value=-1 prim::ListConstruct pnnx_18136 1 1 25722 16966 Tensor.view Tensor.view_1972 2 1 relative_position_index.181 16966 16967 $input=relative_position_index.181 $shape=16966 #relative_position_index.181=(64,64)i64 #16967=(4096)i64 prim::ListConstruct pnnx_18138 1 1 16967 16968 #16967=(4096)i64 prim::Constant pnnx_18140 0 1 25723 value=64 prim::Constant pnnx_18141 0 1 25724 value=-1 prim::ListConstruct pnnx_18142 3 1 16930 25723 25724 16970 Tensor.index Tensor.index_415 2 1 relative_position_bias_table.181 16968 16969 $input=relative_position_bias_table.181 $expr=16968 #relative_position_bias_table.181=(225,6)f32 #16969=(4096,6)f32 prim::Constant pnnx_18144 0 1 25725 value=2 prim::Constant pnnx_18145 0 1 25726 value=0 prim::Constant pnnx_18146 0 1 25727 value=1 prim::ListConstruct pnnx_18147 3 1 25725 25726 25727 16972 Tensor.view Tensor.view_1973 2 1 16969 16970 relative_position_bias.181 $input=16969 $shape=16970 #16969=(4096,6)f32 #relative_position_bias.181=(64,64,6)f32 prim::Constant pnnx_18151 0 1 25729 value=0 torch.permute torch.permute_2886 2 1 relative_position_bias.181 16972 16973 $input=relative_position_bias.181 $dims=16972 #relative_position_bias.181=(64,64,6)f32 #16973=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_270 1 1 16973 relative_position_bias0.109 memory_format=torch.contiguous_format $input=16973 #16973=(6,64,64)f32 #relative_position_bias0.109=(6,64,64)f32 prim::Constant pnnx_18153 0 1 25730 value=1 torch.transpose torch.transpose_3171 3 1 k.181 16928 16929 16964 $input=k.181 $dim0=16928 $dim1=16929 #k.181=(36,6,64,32)f32 #16964=(36,6,32,64)f32 torch.matmul torch.matmul_2382 2 1 q0.109 16964 attn.363 $input=q0.109 $other=16964 #q0.109=(36,6,64,32)f32 #16964=(36,6,32,64)f32 #attn.363=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3395 2 1 relative_position_bias0.109 25729 16975 $input=relative_position_bias0.109 $dim=25729 #relative_position_bias0.109=(6,64,64)f32 #16975=(1,6,64,64)f32 aten::add pnnx_18154 3 1 attn.363 16975 25730 attn0.55 #attn.363=(36,6,64,64)f32 #16975=(1,6,64,64)f32 #attn0.55=(36,6,64,64)f32 prim::Constant pnnx_18155 0 1 25731 value=0 aten::size pnnx_18156 2 1 attn_mask.91 25731 16977 #attn_mask.91=(36,64,64)f32 prim::NumToTensor pnnx_18157 1 1 16977 other.91 aten::Int pnnx_18158 1 1 other.91 16979 prim::Constant pnnx_18159 0 1 25732 value=trunc aten::div pnnx_18160 3 1 B_.181 other.91 25732 16980 aten::Int 
pnnx_18161 1 1 16980 16981 prim::Constant pnnx_18162 0 1 25733 value=6 prim::ListConstruct pnnx_18163 5 1 16981 16979 25733 16948 16947 16982 prim::Constant pnnx_18165 0 1 25734 value=1 prim::Constant pnnx_18167 0 1 25735 value=0 prim::Constant pnnx_18169 0 1 25736 value=1 Tensor.view Tensor.view_1974 2 1 attn0.55 16982 16983 $input=attn0.55 $shape=16982 #attn0.55=(36,6,64,64)f32 #16983=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3396 2 1 attn_mask.91 25734 16984 $input=attn_mask.91 $dim=25734 #attn_mask.91=(36,64,64)f32 #16984=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3397 2 1 16984 25735 16985 $input=16984 $dim=25735 #16984=(36,1,64,64)f32 #16985=(1,36,1,64,64)f32 aten::add pnnx_18170 3 1 16983 16985 25736 attn1.55 #16983=(1,36,6,64,64)f32 #16985=(1,36,1,64,64)f32 #attn1.55=(1,36,6,64,64)f32 prim::Constant pnnx_18171 0 1 25737 value=-1 prim::Constant pnnx_18172 0 1 25738 value=6 prim::ListConstruct pnnx_18173 4 1 25737 25738 16946 16945 16987 Tensor.view Tensor.view_1975 2 1 attn1.55 16987 input.405 $input=attn1.55 $shape=16987 #attn1.55=(1,36,6,64,64)f32 #input.405=(36,6,64,64)f32 nn.Softmax layers_mmsa.2.residual_group.blocks.5.attn.softmax 1 1 input.405 16989 dim=-1 #input.405=(36,6,64,64)f32 #16989=(36,6,64,64)f32 nn.Dropout layers_mmsa.2.residual_group.blocks.5.attn.attn_drop 1 1 16989 16990 #16989=(36,6,64,64)f32 #16990=(36,6,64,64)f32 Tensor.select Tensor.select_919 3 1 qkv0.109 25720 25721 v.181 $input=qkv0.109 $dim=25720 $index=25721 #qkv0.109=(3,36,6,64,32)f32 #v.181=(36,6,64,32)f32 prim::Constant pnnx_18176 0 1 25739 value=1 prim::Constant pnnx_18177 0 1 25740 value=2 torch.matmul torch.matmul_2383 2 1 16990 v.181 16991 $input=16990 $other=v.181 #16990=(36,6,64,64)f32 #v.181=(36,6,64,32)f32 #16991=(36,6,64,32)f32 prim::ListConstruct pnnx_18179 3 1 16940 16944 16952 16993 torch.transpose torch.transpose_3172 3 1 16991 25739 25740 16992 $input=16991 $dim0=25739 $dim1=25740 #16991=(36,6,64,32)f32 #16992=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_613 2 1 16992 16993 input0.113 $input=16992 $shape=16993 #16992=(36,64,6,32)f32 #input0.113=(36,64,192)f32 nn.Linear layers_mmsa.2.residual_group.blocks.5.attn.proj 1 1 input0.113 16995 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.113=(36,64,192)f32 #16995=(36,64,192)f32 nn.Dropout layers_mmsa.2.residual_group.blocks.5.attn.proj_drop 1 1 16995 16996 #16995=(36,64,192)f32 #16996=(36,64,192)f32 prim::Constant pnnx_18181 0 1 25741 value=-1 prim::Constant pnnx_18182 0 1 25742 value=8 prim::Constant pnnx_18183 0 1 25743 value=8 prim::ListConstruct pnnx_18184 4 1 25741 25742 25743 16886 16997 prim::Constant pnnx_18186 0 1 25744 value=8 prim::Constant pnnx_18187 0 1 25745 value=trunc aten::div pnnx_18188 3 1 H1.1 25744 25745 16999 aten::Int pnnx_18189 1 1 16999 17000 prim::Constant pnnx_18190 0 1 25746 value=8 prim::Constant pnnx_18191 0 1 25747 value=trunc aten::div pnnx_18192 3 1 W1.1 25746 25747 17001 aten::Int pnnx_18193 1 1 17001 17002 prim::Constant pnnx_18194 0 1 25748 value=1 prim::Constant pnnx_18195 0 1 25749 value=8 prim::Constant pnnx_18196 0 1 25750 value=8 prim::Constant pnnx_18197 0 1 25751 value=-1 prim::ListConstruct pnnx_18198 6 1 25748 17000 17002 25749 25750 25751 17003 prim::Constant pnnx_18200 0 1 25752 value=0 prim::Constant pnnx_18201 0 1 25753 value=1 prim::Constant pnnx_18202 0 1 25754 value=3 prim::Constant pnnx_18203 0 1 25755 value=2 prim::Constant pnnx_18204 0 1 25756 value=4 prim::Constant pnnx_18205 0 1 25757 value=5 prim::ListConstruct pnnx_18206 6 1 25752 
25753 25754 25755 25756 25757 17005 Tensor.view Tensor.view_1976 2 1 16996 16997 windows.181 $input=16996 $shape=16997 #16996=(36,64,192)f32 #windows.181=(36,8,8,192)f32 Tensor.view Tensor.view_1977 2 1 windows.181 17003 x3.109 $input=windows.181 $shape=17003 #windows.181=(36,8,8,192)f32 #x3.109=(1,6,6,8,8,192)f32 prim::Constant pnnx_18210 0 1 25759 value=1 prim::Constant pnnx_18211 0 1 25760 value=-1 prim::ListConstruct pnnx_18212 4 1 25759 1660 1900 25760 17008 torch.permute torch.permute_2887 2 1 x3.109 17005 17006 $input=x3.109 $dims=17005 #x3.109=(1,6,6,8,8,192)f32 #17006=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_271 1 1 17006 17007 memory_format=torch.contiguous_format $input=17006 #17006=(1,6,8,6,8,192)f32 #17007=(1,6,8,6,8,192)f32 prim::Constant pnnx_18214 0 1 25761 value=4 prim::Constant pnnx_18215 0 1 25762 value=4 prim::ListConstruct pnnx_18216 2 1 25761 25762 17010 prim::Constant pnnx_18217 0 1 25763 value=1 prim::Constant pnnx_18218 0 1 25764 value=2 prim::ListConstruct pnnx_18219 2 1 25763 25764 17011 Tensor.view Tensor.view_1978 2 1 17007 17008 shifted_x.91 $input=17007 $shape=17008 #17007=(1,6,8,6,8,192)f32 #shifted_x.91=(1,48,48,192)f32 aten::mul pnnx_18221 2 1 H1.1 W1.1 17013 aten::Int pnnx_18222 1 1 17013 17014 prim::ListConstruct pnnx_18223 3 1 16881 17014 16885 17015 prim::Constant pnnx_18225 0 1 17017 value=None prim::Constant pnnx_18226 0 1 25765 value=1 torch.roll torch.roll_2509 3 1 shifted_x.91 17010 17011 x4.109 $input=shifted_x.91 $shifts=17010 $dims=17011 #shifted_x.91=(1,48,48,192)f32 #x4.109=(1,48,48,192)f32 Tensor.view Tensor.view_1979 2 1 x4.109 17015 x5.91 $input=x4.109 $shape=17015 #x4.109=(1,48,48,192)f32 #x5.91=(1,2304,192)f32 aten::add pnnx_18227 3 1 16860 x5.91 25765 input.407 #16860=(1,2304,192)f32 #x5.91=(1,2304,192)f32 #input.407=(1,2304,192)f32 nn.LayerNorm layers_mmsa.2.residual_group.blocks.5.norm2 1 1 input.407 17019 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.407=(1,2304,192)f32 #17019=(1,2304,192)f32 nn.Linear layers_mmsa.2.residual_group.blocks.5.mlp.fc1 1 1 17019 17024 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #17019=(1,2304,192)f32 #17024=(1,2304,384)f32 nn.GELU layers_mmsa.2.residual_group.blocks.5.mlp.act 1 1 17024 17025 #17024=(1,2304,384)f32 #17025=(1,2304,384)f32 nn.Dropout layers_mmsa.2.residual_group.blocks.5.mlp.drop 1 1 17025 17026 #17025=(1,2304,384)f32 #17026=(1,2304,384)f32 nn.Linear layers_mmsa.2.residual_group.blocks.5.mlp.fc2 1 1 17026 17027 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #17026=(1,2304,384)f32 #17027=(1,2304,192)f32 nn.Dropout layers_mmsa.2.residual_group.blocks.5.mlp.drop 1 1 17027 17028 #17027=(1,2304,192)f32 #17028=(1,2304,192)f32 prim::Constant pnnx_18228 0 1 17029 value=None prim::Constant pnnx_18229 0 1 25766 value=1 aten::add pnnx_18230 3 1 input.407 17028 25766 17030 #input.407=(1,2304,192)f32 #17028=(1,2304,192)f32 #17030=(1,2304,192)f32 prim::Constant pnnx_18231 0 1 17031 value=0 prim::Constant pnnx_18232 0 1 17032 value=1 prim::Constant pnnx_18233 0 1 17033 value=2 prim::Constant pnnx_18234 0 1 17034 value=192 aten::size pnnx_18235 2 1 17030 17031 17035 #17030=(1,2304,192)f32 prim::NumToTensor pnnx_18236 1 1 17035 B.217 aten::Int pnnx_18237 1 1 B.217 17037 prim::ListConstruct pnnx_18239 4 1 17037 17034 1657 1897 17039 torch.transpose torch.transpose_3173 3 1 17030 17032 17033 17038 $input=17030 $dim0=17032 $dim1=17033 #17030=(1,2304,192)f32 
#17038=(1,192,2304)f32 Tensor.view Tensor.view_1980 2 1 17038 17039 input.409 $input=17038 $shape=17039 #17038=(1,192,2304)f32 #input.409=(1,192,48,48)f32 nn.Conv2d layers_mmsa.2.conv 1 1 input.409 17041 bias=True dilation=(1,1) groups=1 in_channels=192 kernel_size=(3,3) out_channels=192 padding=(1,1) padding_mode=zeros stride=(1,1) @bias=(192)f32 @weight=(192,192,3,3)f32 #input.409=(1,192,48,48)f32 #17041=(1,192,48,48)f32 prim::Constant pnnx_18241 0 1 17042 value=-1 prim::Constant pnnx_18242 0 1 17043 value=2 prim::Constant pnnx_18243 0 1 17044 value=1 prim::Constant pnnx_18245 0 1 25767 value=2 torch.flatten torch.flatten_2200 3 1 17041 17043 17042 17045 $input=17041 $start_dim=17043 $end_dim=17042 #17041=(1,192,48,48)f32 #17045=(1,192,2304)f32 torch.transpose torch.transpose_3174 3 1 17045 17044 25767 17046 $input=17045 $dim0=17044 $dim1=25767 #17045=(1,192,2304)f32 #17046=(1,2304,192)f32 aten::add pnnx_18247 3 1 17046 16065 16066 17047 #17046=(1,2304,192)f32 #16065=(1,2304,192)f32 #17047=(1,2304,192)f32 prim::Constant pnnx_18248 0 1 17048 value=1 prim::Constant pnnx_18249 0 1 17065 value=trunc prim::Constant pnnx_18250 0 1 17066 value=8 prim::Constant pnnx_18251 0 1 17067 value=0 prim::Constant pnnx_18252 0 1 17068 value=2 prim::Constant pnnx_18253 0 1 17069 value=1 prim::Constant pnnx_18254 0 1 17070 value=3 prim::Constant pnnx_18255 0 1 17071 value=8 prim::Constant pnnx_18256 0 1 17072 value=4 prim::Constant pnnx_18257 0 1 17073 value=5 prim::Constant pnnx_18258 0 1 17074 value=-1 prim::Constant pnnx_18259 0 1 17075 value=64 aten::size pnnx_18260 2 1 17047 17067 17081 #17047=(1,2304,192)f32 prim::NumToTensor pnnx_18261 1 1 17081 B.219 aten::Int pnnx_18262 1 1 B.219 17083 aten::Int pnnx_18263 1 1 B.219 17084 aten::size pnnx_18264 2 1 17047 17068 17085 #17047=(1,2304,192)f32 prim::NumToTensor pnnx_18265 1 1 17085 C.371 aten::Int pnnx_18266 1 1 C.371 17087 aten::Int pnnx_18267 1 1 C.371 17088 aten::Int pnnx_18268 1 1 C.371 17089 aten::Int pnnx_18269 1 1 C.371 17090 nn.LayerNorm layers_mmsa.3.residual_group.blocks.0.norm1 1 1 17047 17091 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #17047=(1,2304,192)f32 #17091=(1,2304,192)f32 prim::ListConstruct pnnx_18270 4 1 17084 1654 1894 17090 17092 prim::Constant pnnx_18272 0 1 25768 value=0 Tensor.view Tensor.view_1981 2 1 17091 17092 x.183 $input=17091 $shape=17092 #17091=(1,2304,192)f32 #x.183=(1,48,48,192)f32 aten::size pnnx_18273 2 1 x.183 25768 17094 #x.183=(1,48,48,192)f32 prim::NumToTensor pnnx_18274 1 1 17094 B0.111 aten::Int pnnx_18275 1 1 B0.111 17096 aten::size pnnx_18276 2 1 x.183 17069 17097 #x.183=(1,48,48,192)f32 prim::NumToTensor pnnx_18277 1 1 17097 17098 prim::Constant pnnx_18278 0 1 25769 value=2 aten::size pnnx_18279 2 1 x.183 25769 17099 #x.183=(1,48,48,192)f32 prim::NumToTensor pnnx_18280 1 1 17099 17100 aten::size pnnx_18281 2 1 x.183 17070 17101 #x.183=(1,48,48,192)f32 prim::NumToTensor pnnx_18282 1 1 17101 C0.111 aten::Int pnnx_18283 1 1 C0.111 17103 aten::Int pnnx_18284 1 1 C0.111 17104 aten::div pnnx_18285 3 1 17098 17066 17065 17105 aten::Int pnnx_18286 1 1 17105 17106 prim::Constant pnnx_18287 0 1 25770 value=8 prim::Constant pnnx_18288 0 1 25771 value=trunc aten::div pnnx_18289 3 1 17100 25770 25771 17107 aten::Int pnnx_18290 1 1 17107 17108 prim::Constant pnnx_18291 0 1 25772 value=8 prim::ListConstruct pnnx_18292 6 1 17096 17106 17071 17108 25772 17104 17109 prim::Constant pnnx_18294 0 1 25773 value=0 prim::Constant pnnx_18295 0 1 25774 value=1 prim::Constant 
pnnx_18296 0 1 25775 value=3 prim::Constant pnnx_18297 0 1 25776 value=2 prim::ListConstruct pnnx_18298 6 1 25773 25774 25775 25776 17072 17073 17111 Tensor.view Tensor.view_1982 2 1 x.183 17109 x0.111 $input=x.183 $shape=17109 #x.183=(1,48,48,192)f32 #x0.111=(1,6,8,6,8,192)f32 prim::Constant pnnx_18302 0 1 25778 value=8 prim::Constant pnnx_18303 0 1 25779 value=8 prim::ListConstruct pnnx_18304 4 1 17074 25778 25779 17103 17114 torch.permute torch.permute_2888 2 1 x0.111 17111 17112 $input=x0.111 $dims=17111 #x0.111=(1,6,8,6,8,192)f32 #17112=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_272 1 1 17112 17113 memory_format=torch.contiguous_format $input=17112 #17112=(1,6,6,8,8,192)f32 #17113=(1,6,6,8,8,192)f32 prim::Constant pnnx_18306 0 1 25780 value=-1 prim::ListConstruct pnnx_18307 3 1 25780 17075 17089 17116 prim::Constant pnnx_18309 0 1 17118 value=1.767767e-01 prim::Constant pnnx_18310 0 1 17119 value=trunc prim::Constant pnnx_18311 0 1 17120 value=6 prim::Constant pnnx_18312 0 1 17121 value=0 prim::Constant pnnx_18313 0 1 17122 value=1 prim::Constant pnnx_18314 0 1 17123 value=2 prim::Constant pnnx_18315 0 1 17124 value=3 prim::Constant pnnx_18316 0 1 17125 value=6 prim::Constant pnnx_18317 0 1 17126 value=4 prim::Constant pnnx_18318 0 1 17127 value=-2 prim::Constant pnnx_18319 0 1 17128 value=-1 prim::Constant pnnx_18320 0 1 17129 value=64 pnnx.Attribute layers_mmsa.3.residual_group.blocks.0.attn 0 1 relative_position_bias_table.183 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.183=(225,6)f32 pnnx.Attribute layers_mmsa.3.residual_group.blocks.0.attn 0 1 relative_position_index.183 @relative_position_index=(64,64)i64 #relative_position_index.183=(64,64)i64 Tensor.view Tensor.view_1983 2 1 17113 17114 x_windows.183 $input=17113 $shape=17114 #17113=(1,6,6,8,8,192)f32 #x_windows.183=(36,8,8,192)f32 Tensor.view Tensor.view_1984 2 1 x_windows.183 17116 x1.111 $input=x_windows.183 $shape=17116 #x_windows.183=(36,8,8,192)f32 #x1.111=(36,64,192)f32 aten::size pnnx_18321 2 1 x1.111 17121 17137 #x1.111=(36,64,192)f32 prim::NumToTensor pnnx_18322 1 1 17137 B_.183 aten::Int pnnx_18323 1 1 B_.183 17139 aten::Int pnnx_18324 1 1 B_.183 17140 aten::size pnnx_18325 2 1 x1.111 17122 17141 #x1.111=(36,64,192)f32 prim::NumToTensor pnnx_18326 1 1 17141 N.183 aten::Int pnnx_18327 1 1 N.183 17143 aten::Int pnnx_18328 1 1 N.183 17144 aten::size pnnx_18329 2 1 x1.111 17123 17145 #x1.111=(36,64,192)f32 prim::NumToTensor pnnx_18330 1 1 17145 C.373 aten::Int pnnx_18331 1 1 C.373 17147 nn.Linear layers_mmsa.3.residual_group.blocks.0.attn.qkv 1 1 x1.111 17148 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.111=(36,64,192)f32 #17148=(36,64,576)f32 aten::div pnnx_18332 3 1 C.373 17120 17119 17149 aten::Int pnnx_18333 1 1 17149 17150 prim::ListConstruct pnnx_18334 5 1 17140 17144 17124 17125 17150 17151 prim::Constant pnnx_18336 0 1 25781 value=2 prim::Constant pnnx_18337 0 1 25782 value=0 prim::Constant pnnx_18338 0 1 25783 value=3 prim::Constant pnnx_18339 0 1 25784 value=1 prim::ListConstruct pnnx_18340 5 1 25781 25782 25783 25784 17126 17153 Tensor.reshape Tensor.reshape_614 2 1 17148 17151 17152 $input=17148 $shape=17151 #17148=(36,64,576)f32 #17152=(36,64,3,6,32)f32 prim::Constant pnnx_18342 0 1 25785 value=0 prim::Constant pnnx_18343 0 1 25786 value=0 prim::Constant pnnx_18345 0 1 25787 value=0 prim::Constant pnnx_18346 0 1 25788 value=1 prim::Constant pnnx_18348 0 1 25789 value=0 prim::Constant pnnx_18349 0 1 25790 value=2 torch.permute 
torch.permute_2889 2 1 17152 17153 qkv0.111 $input=17152 $dims=17153 #17152=(36,64,3,6,32)f32 #qkv0.111=(3,36,6,64,32)f32 Tensor.select Tensor.select_920 3 1 qkv0.111 25785 25786 q.183 $input=qkv0.111 $dim=25785 $index=25786 #qkv0.111=(3,36,6,64,32)f32 #q.183=(36,6,64,32)f32 aten::mul pnnx_18351 2 1 q.183 17118 q0.111 #q.183=(36,6,64,32)f32 #q0.111=(36,6,64,32)f32 Tensor.select Tensor.select_921 3 1 qkv0.111 25787 25788 k.183 $input=qkv0.111 $dim=25787 $index=25788 #qkv0.111=(3,36,6,64,32)f32 #k.183=(36,6,64,32)f32 prim::Constant pnnx_18354 0 1 25791 value=-1 prim::ListConstruct pnnx_18355 1 1 25791 17161 Tensor.view Tensor.view_1985 2 1 relative_position_index.183 17161 17162 $input=relative_position_index.183 $shape=17161 #relative_position_index.183=(64,64)i64 #17162=(4096)i64 prim::ListConstruct pnnx_18357 1 1 17162 17163 #17162=(4096)i64 prim::Constant pnnx_18359 0 1 25792 value=64 prim::Constant pnnx_18360 0 1 25793 value=-1 prim::ListConstruct pnnx_18361 3 1 17129 25792 25793 17165 Tensor.index Tensor.index_416 2 1 relative_position_bias_table.183 17163 17164 $input=relative_position_bias_table.183 $expr=17163 #relative_position_bias_table.183=(225,6)f32 #17164=(4096,6)f32 prim::Constant pnnx_18363 0 1 25794 value=2 prim::Constant pnnx_18364 0 1 25795 value=0 prim::Constant pnnx_18365 0 1 25796 value=1 prim::ListConstruct pnnx_18366 3 1 25794 25795 25796 17167 Tensor.view Tensor.view_1986 2 1 17164 17165 relative_position_bias.183 $input=17164 $shape=17165 #17164=(4096,6)f32 #relative_position_bias.183=(64,64,6)f32 prim::Constant pnnx_18370 0 1 25798 value=0 torch.permute torch.permute_2890 2 1 relative_position_bias.183 17167 17168 $input=relative_position_bias.183 $dims=17167 #relative_position_bias.183=(64,64,6)f32 #17168=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_273 1 1 17168 relative_position_bias0.111 memory_format=torch.contiguous_format $input=17168 #17168=(6,64,64)f32 #relative_position_bias0.111=(6,64,64)f32 prim::Constant pnnx_18372 0 1 25799 value=1 torch.transpose torch.transpose_3175 3 1 k.183 17127 17128 17159 $input=k.183 $dim0=17127 $dim1=17128 #k.183=(36,6,64,32)f32 #17159=(36,6,32,64)f32 torch.matmul torch.matmul_2384 2 1 q0.111 17159 attn.367 $input=q0.111 $other=17159 #q0.111=(36,6,64,32)f32 #17159=(36,6,32,64)f32 #attn.367=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3398 2 1 relative_position_bias0.111 25798 17170 $input=relative_position_bias0.111 $dim=25798 #relative_position_bias0.111=(6,64,64)f32 #17170=(1,6,64,64)f32 aten::add pnnx_18373 3 1 attn.367 17170 25799 input.411 #attn.367=(36,6,64,64)f32 #17170=(1,6,64,64)f32 #input.411=(36,6,64,64)f32 nn.Softmax layers_mmsa.3.residual_group.blocks.0.attn.softmax 1 1 input.411 17172 dim=-1 #input.411=(36,6,64,64)f32 #17172=(36,6,64,64)f32 nn.Dropout layers_mmsa.3.residual_group.blocks.0.attn.attn_drop 1 1 17172 17173 #17172=(36,6,64,64)f32 #17173=(36,6,64,64)f32 Tensor.select Tensor.select_922 3 1 qkv0.111 25789 25790 v.183 $input=qkv0.111 $dim=25789 $index=25790 #qkv0.111=(3,36,6,64,32)f32 #v.183=(36,6,64,32)f32 prim::Constant pnnx_18375 0 1 25800 value=1 prim::Constant pnnx_18376 0 1 25801 value=2 torch.matmul torch.matmul_2385 2 1 17173 v.183 17174 $input=17173 $other=v.183 #17173=(36,6,64,64)f32 #v.183=(36,6,64,32)f32 #17174=(36,6,64,32)f32 prim::ListConstruct pnnx_18378 3 1 17139 17143 17147 17176 torch.transpose torch.transpose_3176 3 1 17174 25800 25801 17175 $input=17174 $dim0=25800 $dim1=25801 #17174=(36,6,64,32)f32 #17175=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_615 2 1 17175 17176 
input0.115 $input=17175 $shape=17176 #17175=(36,64,6,32)f32 #input0.115=(36,64,192)f32 nn.Linear layers_mmsa.3.residual_group.blocks.0.attn.proj 1 1 input0.115 17178 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.115=(36,64,192)f32 #17178=(36,64,192)f32 nn.Dropout layers_mmsa.3.residual_group.blocks.0.attn.proj_drop 1 1 17178 17179 #17178=(36,64,192)f32 #17179=(36,64,192)f32 prim::Constant pnnx_18380 0 1 25802 value=-1 prim::Constant pnnx_18381 0 1 25803 value=8 prim::Constant pnnx_18382 0 1 25804 value=8 prim::ListConstruct pnnx_18383 4 1 25802 25803 25804 17088 17180 prim::Constant pnnx_18385 0 1 25805 value=8 prim::Constant pnnx_18386 0 1 25806 value=trunc aten::div pnnx_18387 3 1 H1.1 25805 25806 17182 aten::Int pnnx_18388 1 1 17182 17183 prim::Constant pnnx_18389 0 1 25807 value=8 prim::Constant pnnx_18390 0 1 25808 value=trunc aten::div pnnx_18391 3 1 W1.1 25807 25808 17184 aten::Int pnnx_18392 1 1 17184 17185 prim::Constant pnnx_18393 0 1 25809 value=1 prim::Constant pnnx_18394 0 1 25810 value=8 prim::Constant pnnx_18395 0 1 25811 value=8 prim::Constant pnnx_18396 0 1 25812 value=-1 prim::ListConstruct pnnx_18397 6 1 25809 17183 17185 25810 25811 25812 17186 prim::Constant pnnx_18399 0 1 25813 value=0 prim::Constant pnnx_18400 0 1 25814 value=1 prim::Constant pnnx_18401 0 1 25815 value=3 prim::Constant pnnx_18402 0 1 25816 value=2 prim::Constant pnnx_18403 0 1 25817 value=4 prim::Constant pnnx_18404 0 1 25818 value=5 prim::ListConstruct pnnx_18405 6 1 25813 25814 25815 25816 25817 25818 17188 Tensor.view Tensor.view_1987 2 1 17179 17180 windows.183 $input=17179 $shape=17180 #17179=(36,64,192)f32 #windows.183=(36,8,8,192)f32 Tensor.view Tensor.view_1988 2 1 windows.183 17186 x2.111 $input=windows.183 $shape=17186 #windows.183=(36,8,8,192)f32 #x2.111=(1,6,6,8,8,192)f32 prim::Constant pnnx_18409 0 1 25820 value=1 prim::Constant pnnx_18410 0 1 25821 value=-1 prim::ListConstruct pnnx_18411 4 1 25820 1651 1891 25821 17191 torch.permute torch.permute_2891 2 1 x2.111 17188 17189 $input=x2.111 $dims=17188 #x2.111=(1,6,6,8,8,192)f32 #17189=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_274 1 1 17189 17190 memory_format=torch.contiguous_format $input=17189 #17189=(1,6,8,6,8,192)f32 #17190=(1,6,8,6,8,192)f32 aten::mul pnnx_18413 2 1 H1.1 W1.1 17193 aten::Int pnnx_18414 1 1 17193 17194 prim::ListConstruct pnnx_18415 3 1 17083 17194 17087 17195 prim::Constant pnnx_18417 0 1 17197 value=None prim::Constant pnnx_18418 0 1 25822 value=1 Tensor.view Tensor.view_1989 2 1 17190 17191 x3.111 $input=17190 $shape=17191 #17190=(1,6,8,6,8,192)f32 #x3.111=(1,48,48,192)f32 Tensor.view Tensor.view_1990 2 1 x3.111 17195 x4.111 $input=x3.111 $shape=17195 #x3.111=(1,48,48,192)f32 #x4.111=(1,2304,192)f32 aten::add pnnx_18419 3 1 17047 x4.111 25822 input.413 #17047=(1,2304,192)f32 #x4.111=(1,2304,192)f32 #input.413=(1,2304,192)f32 nn.LayerNorm layers_mmsa.3.residual_group.blocks.0.norm2 1 1 input.413 17199 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.413=(1,2304,192)f32 #17199=(1,2304,192)f32 nn.Linear layers_mmsa.3.residual_group.blocks.0.mlp.fc1 1 1 17199 17204 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #17199=(1,2304,192)f32 #17204=(1,2304,384)f32 nn.GELU layers_mmsa.3.residual_group.blocks.0.mlp.act 1 1 17204 17205 #17204=(1,2304,384)f32 #17205=(1,2304,384)f32 nn.Dropout layers_mmsa.3.residual_group.blocks.0.mlp.drop 1 1 17205 17206 #17205=(1,2304,384)f32 
#17206=(1,2304,384)f32 nn.Linear layers_mmsa.3.residual_group.blocks.0.mlp.fc2 1 1 17206 17207 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #17206=(1,2304,384)f32 #17207=(1,2304,192)f32 nn.Dropout layers_mmsa.3.residual_group.blocks.0.mlp.drop 1 1 17207 17208 #17207=(1,2304,192)f32 #17208=(1,2304,192)f32 prim::Constant pnnx_18420 0 1 17209 value=None prim::Constant pnnx_18421 0 1 25823 value=1 aten::add pnnx_18422 3 1 input.413 17208 25823 17210 #input.413=(1,2304,192)f32 #17208=(1,2304,192)f32 #17210=(1,2304,192)f32 prim::Constant pnnx_18423 0 1 17211 value=trunc prim::Constant pnnx_18424 0 1 17212 value=8 prim::Constant pnnx_18425 0 1 17213 value=0 prim::Constant pnnx_18426 0 1 17214 value=2 prim::Constant pnnx_18427 0 1 17215 value=-4 prim::Constant pnnx_18428 0 1 17216 value=1 prim::Constant pnnx_18429 0 1 17217 value=3 prim::Constant pnnx_18430 0 1 17218 value=8 prim::Constant pnnx_18431 0 1 17219 value=4 prim::Constant pnnx_18432 0 1 17220 value=5 prim::Constant pnnx_18433 0 1 17221 value=-1 prim::Constant pnnx_18434 0 1 17222 value=64 pnnx.Attribute layers_mmsa.3.residual_group.blocks.1 0 1 attn_mask.93 @attn_mask=(36,64,64)f32 #attn_mask.93=(36,64,64)f32 aten::size pnnx_18435 2 1 17210 17213 17229 #17210=(1,2304,192)f32 prim::NumToTensor pnnx_18436 1 1 17229 B.221 aten::Int pnnx_18437 1 1 B.221 17231 aten::Int pnnx_18438 1 1 B.221 17232 aten::size pnnx_18439 2 1 17210 17214 17233 #17210=(1,2304,192)f32 prim::NumToTensor pnnx_18440 1 1 17233 C.375 aten::Int pnnx_18441 1 1 C.375 17235 aten::Int pnnx_18442 1 1 C.375 17236 aten::Int pnnx_18443 1 1 C.375 17237 aten::Int pnnx_18444 1 1 C.375 17238 nn.LayerNorm layers_mmsa.3.residual_group.blocks.1.norm1 1 1 17210 17239 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #17210=(1,2304,192)f32 #17239=(1,2304,192)f32 prim::ListConstruct pnnx_18445 4 1 17232 1648 1888 17238 17240 prim::Constant pnnx_18447 0 1 25824 value=-4 prim::ListConstruct pnnx_18448 2 1 17215 25824 17242 prim::Constant pnnx_18449 0 1 25825 value=2 prim::ListConstruct pnnx_18450 2 1 17216 25825 17243 Tensor.view Tensor.view_1991 2 1 17239 17240 x.185 $input=17239 $shape=17240 #17239=(1,2304,192)f32 #x.185=(1,48,48,192)f32 prim::Constant pnnx_18452 0 1 25826 value=0 torch.roll torch.roll_2510 3 1 x.185 17242 17243 x0.113 $input=x.185 $shifts=17242 $dims=17243 #x.185=(1,48,48,192)f32 #x0.113=(1,48,48,192)f32 aten::size pnnx_18453 2 1 x0.113 25826 17245 #x0.113=(1,48,48,192)f32 prim::NumToTensor pnnx_18454 1 1 17245 B0.113 aten::Int pnnx_18455 1 1 B0.113 17247 prim::Constant pnnx_18456 0 1 25827 value=1 aten::size pnnx_18457 2 1 x0.113 25827 17248 #x0.113=(1,48,48,192)f32 prim::NumToTensor pnnx_18458 1 1 17248 17249 prim::Constant pnnx_18459 0 1 25828 value=2 aten::size pnnx_18460 2 1 x0.113 25828 17250 #x0.113=(1,48,48,192)f32 prim::NumToTensor pnnx_18461 1 1 17250 17251 aten::size pnnx_18462 2 1 x0.113 17217 17252 #x0.113=(1,48,48,192)f32 prim::NumToTensor pnnx_18463 1 1 17252 C0.113 aten::Int pnnx_18464 1 1 C0.113 17254 aten::Int pnnx_18465 1 1 C0.113 17255 aten::div pnnx_18466 3 1 17249 17212 17211 17256 aten::Int pnnx_18467 1 1 17256 17257 prim::Constant pnnx_18468 0 1 25829 value=8 prim::Constant pnnx_18469 0 1 25830 value=trunc aten::div pnnx_18470 3 1 17251 25829 25830 17258 aten::Int pnnx_18471 1 1 17258 17259 prim::Constant pnnx_18472 0 1 25831 value=8 prim::ListConstruct pnnx_18473 6 1 17247 17257 17218 17259 25831 17255 17260 prim::Constant pnnx_18475 0 1 25832 value=0 prim::Constant 
pnnx_18476 0 1 25833 value=1 prim::Constant pnnx_18477 0 1 25834 value=3 prim::Constant pnnx_18478 0 1 25835 value=2 prim::ListConstruct pnnx_18479 6 1 25832 25833 25834 25835 17219 17220 17262 Tensor.view Tensor.view_1992 2 1 x0.113 17260 x1.113 $input=x0.113 $shape=17260 #x0.113=(1,48,48,192)f32 #x1.113=(1,6,8,6,8,192)f32 prim::Constant pnnx_18483 0 1 25837 value=8 prim::Constant pnnx_18484 0 1 25838 value=8 prim::ListConstruct pnnx_18485 4 1 17221 25837 25838 17254 17265 torch.permute torch.permute_2892 2 1 x1.113 17262 17263 $input=x1.113 $dims=17262 #x1.113=(1,6,8,6,8,192)f32 #17263=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_275 1 1 17263 17264 memory_format=torch.contiguous_format $input=17263 #17263=(1,6,6,8,8,192)f32 #17264=(1,6,6,8,8,192)f32 prim::Constant pnnx_18487 0 1 25839 value=-1 prim::ListConstruct pnnx_18488 3 1 25839 17222 17237 17267 prim::Constant pnnx_18490 0 1 17269 value=1.767767e-01 prim::Constant pnnx_18491 0 1 17270 value=trunc prim::Constant pnnx_18492 0 1 17271 value=6 prim::Constant pnnx_18493 0 1 17272 value=0 prim::Constant pnnx_18494 0 1 17273 value=1 prim::Constant pnnx_18495 0 1 17274 value=2 prim::Constant pnnx_18496 0 1 17275 value=3 prim::Constant pnnx_18497 0 1 17276 value=6 prim::Constant pnnx_18498 0 1 17277 value=4 prim::Constant pnnx_18499 0 1 17278 value=-2 prim::Constant pnnx_18500 0 1 17279 value=-1 prim::Constant pnnx_18501 0 1 17280 value=64 pnnx.Attribute layers_mmsa.3.residual_group.blocks.1.attn 0 1 relative_position_bias_table.185 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.185=(225,6)f32 pnnx.Attribute layers_mmsa.3.residual_group.blocks.1.attn 0 1 relative_position_index.185 @relative_position_index=(64,64)i64 #relative_position_index.185=(64,64)i64 Tensor.view Tensor.view_1993 2 1 17264 17265 x_windows.185 $input=17264 $shape=17265 #17264=(1,6,6,8,8,192)f32 #x_windows.185=(36,8,8,192)f32 Tensor.view Tensor.view_1994 2 1 x_windows.185 17267 x2.113 $input=x_windows.185 $shape=17267 #x_windows.185=(36,8,8,192)f32 #x2.113=(36,64,192)f32 aten::size pnnx_18502 2 1 x2.113 17272 17288 #x2.113=(36,64,192)f32 prim::NumToTensor pnnx_18503 1 1 17288 B_.185 aten::Int pnnx_18504 1 1 B_.185 17290 aten::Int pnnx_18505 1 1 B_.185 17291 aten::size pnnx_18506 2 1 x2.113 17273 17292 #x2.113=(36,64,192)f32 prim::NumToTensor pnnx_18507 1 1 17292 N.185 aten::Int pnnx_18508 1 1 N.185 17294 aten::Int pnnx_18509 1 1 N.185 17295 aten::Int pnnx_18510 1 1 N.185 17296 aten::Int pnnx_18511 1 1 N.185 17297 aten::Int pnnx_18512 1 1 N.185 17298 aten::Int pnnx_18513 1 1 N.185 17299 aten::size pnnx_18514 2 1 x2.113 17274 17300 #x2.113=(36,64,192)f32 prim::NumToTensor pnnx_18515 1 1 17300 C.377 aten::Int pnnx_18516 1 1 C.377 17302 nn.Linear layers_mmsa.3.residual_group.blocks.1.attn.qkv 1 1 x2.113 17303 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.113=(36,64,192)f32 #17303=(36,64,576)f32 aten::div pnnx_18517 3 1 C.377 17271 17270 17304 aten::Int pnnx_18518 1 1 17304 17305 prim::ListConstruct pnnx_18519 5 1 17291 17299 17275 17276 17305 17306 prim::Constant pnnx_18521 0 1 25840 value=2 prim::Constant pnnx_18522 0 1 25841 value=0 prim::Constant pnnx_18523 0 1 25842 value=3 prim::Constant pnnx_18524 0 1 25843 value=1 prim::ListConstruct pnnx_18525 5 1 25840 25841 25842 25843 17277 17308 Tensor.reshape Tensor.reshape_616 2 1 17303 17306 17307 $input=17303 $shape=17306 #17303=(36,64,576)f32 #17307=(36,64,3,6,32)f32 prim::Constant pnnx_18527 0 1 25844 value=0 prim::Constant pnnx_18528 0 1 25845 value=0 
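blocks.1 of layers_mmsa.3, started above, is a shifted block (SW-MSA): torch.roll_2510 shifts x.185 by (-4,-4) over dims (1,2) before partitioning, and the attention mask is never computed in the trace; pnnx has folded it into the constant pnnx.Attribute attn_mask.93=(36,64,64)f32. The sketch below shows how such a mask is conventionally built for an 8x8 window with shift 4 on a 48x48 grid; it follows the Swin reference recipe and is an assumption about how the folded constant was produced, not data stored in the dump.

import torch

H = W = 48
window_size, shift = 8, 4

img_mask = torch.zeros(1, H, W, 1)
slices = (slice(0, -window_size), slice(-window_size, -shift), slice(-shift, None))
cnt = 0
for h in slices:
    for w in slices:
        img_mask[:, h, w, :] = cnt        # label each shifted region
        cnt += 1

# partition the label map into 8x8 windows: (36, 64)
mask_windows = img_mask.view(1, H // window_size, window_size, W // window_size, window_size, 1)
mask_windows = mask_windows.permute(0, 1, 3, 2, 4, 5).reshape(-1, window_size * window_size)

# tokens from different regions inside one window must not attend to each other
attn_mask = mask_windows.unsqueeze(1) - mask_windows.unsqueeze(2)
attn_mask = attn_mask.masked_fill(attn_mask != 0, -100.0).masked_fill(attn_mask == 0, 0.0)
print(attn_mask.shape)   # torch.Size([36, 64, 64]), the shape of attn_mask.93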
prim::Constant pnnx_18530 0 1 25846 value=0 prim::Constant pnnx_18531 0 1 25847 value=1 prim::Constant pnnx_18533 0 1 25848 value=0 prim::Constant pnnx_18534 0 1 25849 value=2 torch.permute torch.permute_2893 2 1 17307 17308 qkv0.113 $input=17307 $dims=17308 #17307=(36,64,3,6,32)f32 #qkv0.113=(3,36,6,64,32)f32 Tensor.select Tensor.select_923 3 1 qkv0.113 25844 25845 q.185 $input=qkv0.113 $dim=25844 $index=25845 #qkv0.113=(3,36,6,64,32)f32 #q.185=(36,6,64,32)f32 aten::mul pnnx_18536 2 1 q.185 17269 q0.113 #q.185=(36,6,64,32)f32 #q0.113=(36,6,64,32)f32 Tensor.select Tensor.select_924 3 1 qkv0.113 25846 25847 k.185 $input=qkv0.113 $dim=25846 $index=25847 #qkv0.113=(3,36,6,64,32)f32 #k.185=(36,6,64,32)f32 prim::Constant pnnx_18539 0 1 25850 value=-1 prim::ListConstruct pnnx_18540 1 1 25850 17316 Tensor.view Tensor.view_1995 2 1 relative_position_index.185 17316 17317 $input=relative_position_index.185 $shape=17316 #relative_position_index.185=(64,64)i64 #17317=(4096)i64 prim::ListConstruct pnnx_18542 1 1 17317 17318 #17317=(4096)i64 prim::Constant pnnx_18544 0 1 25851 value=64 prim::Constant pnnx_18545 0 1 25852 value=-1 prim::ListConstruct pnnx_18546 3 1 17280 25851 25852 17320 Tensor.index Tensor.index_417 2 1 relative_position_bias_table.185 17318 17319 $input=relative_position_bias_table.185 $expr=17318 #relative_position_bias_table.185=(225,6)f32 #17319=(4096,6)f32 prim::Constant pnnx_18548 0 1 25853 value=2 prim::Constant pnnx_18549 0 1 25854 value=0 prim::Constant pnnx_18550 0 1 25855 value=1 prim::ListConstruct pnnx_18551 3 1 25853 25854 25855 17322 Tensor.view Tensor.view_1996 2 1 17319 17320 relative_position_bias.185 $input=17319 $shape=17320 #17319=(4096,6)f32 #relative_position_bias.185=(64,64,6)f32 prim::Constant pnnx_18555 0 1 25857 value=0 torch.permute torch.permute_2894 2 1 relative_position_bias.185 17322 17323 $input=relative_position_bias.185 $dims=17322 #relative_position_bias.185=(64,64,6)f32 #17323=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_276 1 1 17323 relative_position_bias0.113 memory_format=torch.contiguous_format $input=17323 #17323=(6,64,64)f32 #relative_position_bias0.113=(6,64,64)f32 prim::Constant pnnx_18557 0 1 25858 value=1 torch.transpose torch.transpose_3177 3 1 k.185 17278 17279 17314 $input=k.185 $dim0=17278 $dim1=17279 #k.185=(36,6,64,32)f32 #17314=(36,6,32,64)f32 torch.matmul torch.matmul_2386 2 1 q0.113 17314 attn.371 $input=q0.113 $other=17314 #q0.113=(36,6,64,32)f32 #17314=(36,6,32,64)f32 #attn.371=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3399 2 1 relative_position_bias0.113 25857 17325 $input=relative_position_bias0.113 $dim=25857 #relative_position_bias0.113=(6,64,64)f32 #17325=(1,6,64,64)f32 aten::add pnnx_18558 3 1 attn.371 17325 25858 attn0.57 #attn.371=(36,6,64,64)f32 #17325=(1,6,64,64)f32 #attn0.57=(36,6,64,64)f32 prim::Constant pnnx_18559 0 1 25859 value=0 aten::size pnnx_18560 2 1 attn_mask.93 25859 17327 #attn_mask.93=(36,64,64)f32 prim::NumToTensor pnnx_18561 1 1 17327 other.93 aten::Int pnnx_18562 1 1 other.93 17329 prim::Constant pnnx_18563 0 1 25860 value=trunc aten::div pnnx_18564 3 1 B_.185 other.93 25860 17330 aten::Int pnnx_18565 1 1 17330 17331 prim::Constant pnnx_18566 0 1 25861 value=6 prim::ListConstruct pnnx_18567 5 1 17331 17329 25861 17298 17297 17332 prim::Constant pnnx_18569 0 1 25862 value=1 prim::Constant pnnx_18571 0 1 25863 value=0 prim::Constant pnnx_18573 0 1 25864 value=1 Tensor.view Tensor.view_1997 2 1 attn0.57 17332 17333 $input=attn0.57 $shape=17332 #attn0.57=(36,6,64,64)f32 #17333=(1,36,6,64,64)f32 
torch.unsqueeze torch.unsqueeze_3400 2 1 attn_mask.93 25862 17334 $input=attn_mask.93 $dim=25862 #attn_mask.93=(36,64,64)f32 #17334=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3401 2 1 17334 25863 17335 $input=17334 $dim=25863 #17334=(36,1,64,64)f32 #17335=(1,36,1,64,64)f32 aten::add pnnx_18574 3 1 17333 17335 25864 attn1.57 #17333=(1,36,6,64,64)f32 #17335=(1,36,1,64,64)f32 #attn1.57=(1,36,6,64,64)f32 prim::Constant pnnx_18575 0 1 25865 value=-1 prim::Constant pnnx_18576 0 1 25866 value=6 prim::ListConstruct pnnx_18577 4 1 25865 25866 17296 17295 17337 Tensor.view Tensor.view_1998 2 1 attn1.57 17337 input.415 $input=attn1.57 $shape=17337 #attn1.57=(1,36,6,64,64)f32 #input.415=(36,6,64,64)f32 nn.Softmax layers_mmsa.3.residual_group.blocks.1.attn.softmax 1 1 input.415 17339 dim=-1 #input.415=(36,6,64,64)f32 #17339=(36,6,64,64)f32 nn.Dropout layers_mmsa.3.residual_group.blocks.1.attn.attn_drop 1 1 17339 17340 #17339=(36,6,64,64)f32 #17340=(36,6,64,64)f32 Tensor.select Tensor.select_925 3 1 qkv0.113 25848 25849 v.185 $input=qkv0.113 $dim=25848 $index=25849 #qkv0.113=(3,36,6,64,32)f32 #v.185=(36,6,64,32)f32 prim::Constant pnnx_18580 0 1 25867 value=1 prim::Constant pnnx_18581 0 1 25868 value=2 torch.matmul torch.matmul_2387 2 1 17340 v.185 17341 $input=17340 $other=v.185 #17340=(36,6,64,64)f32 #v.185=(36,6,64,32)f32 #17341=(36,6,64,32)f32 prim::ListConstruct pnnx_18583 3 1 17290 17294 17302 17343 torch.transpose torch.transpose_3178 3 1 17341 25867 25868 17342 $input=17341 $dim0=25867 $dim1=25868 #17341=(36,6,64,32)f32 #17342=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_617 2 1 17342 17343 input0.117 $input=17342 $shape=17343 #17342=(36,64,6,32)f32 #input0.117=(36,64,192)f32 nn.Linear layers_mmsa.3.residual_group.blocks.1.attn.proj 1 1 input0.117 17345 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.117=(36,64,192)f32 #17345=(36,64,192)f32 nn.Dropout layers_mmsa.3.residual_group.blocks.1.attn.proj_drop 1 1 17345 17346 #17345=(36,64,192)f32 #17346=(36,64,192)f32 prim::Constant pnnx_18585 0 1 25869 value=-1 prim::Constant pnnx_18586 0 1 25870 value=8 prim::Constant pnnx_18587 0 1 25871 value=8 prim::ListConstruct pnnx_18588 4 1 25869 25870 25871 17236 17347 prim::Constant pnnx_18590 0 1 25872 value=8 prim::Constant pnnx_18591 0 1 25873 value=trunc aten::div pnnx_18592 3 1 H1.1 25872 25873 17349 aten::Int pnnx_18593 1 1 17349 17350 prim::Constant pnnx_18594 0 1 25874 value=8 prim::Constant pnnx_18595 0 1 25875 value=trunc aten::div pnnx_18596 3 1 W1.1 25874 25875 17351 aten::Int pnnx_18597 1 1 17351 17352 prim::Constant pnnx_18598 0 1 25876 value=1 prim::Constant pnnx_18599 0 1 25877 value=8 prim::Constant pnnx_18600 0 1 25878 value=8 prim::Constant pnnx_18601 0 1 25879 value=-1 prim::ListConstruct pnnx_18602 6 1 25876 17350 17352 25877 25878 25879 17353 prim::Constant pnnx_18604 0 1 25880 value=0 prim::Constant pnnx_18605 0 1 25881 value=1 prim::Constant pnnx_18606 0 1 25882 value=3 prim::Constant pnnx_18607 0 1 25883 value=2 prim::Constant pnnx_18608 0 1 25884 value=4 prim::Constant pnnx_18609 0 1 25885 value=5 prim::ListConstruct pnnx_18610 6 1 25880 25881 25882 25883 25884 25885 17355 Tensor.view Tensor.view_1999 2 1 17346 17347 windows.185 $input=17346 $shape=17347 #17346=(36,64,192)f32 #windows.185=(36,8,8,192)f32 Tensor.view Tensor.view_2000 2 1 windows.185 17353 x3.113 $input=windows.185 $shape=17353 #windows.185=(36,8,8,192)f32 #x3.113=(1,6,6,8,8,192)f32 prim::Constant pnnx_18614 0 1 25887 value=1 prim::Constant pnnx_18615 0 1 25888 value=-1 
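The Tensor.view_1999 / Tensor.view_2000 / torch.permute_2895 chain here is the window-reverse step: 36 windows of shape (8,8,192) are rearranged back into the full (1,48,48,192) feature map before the cyclic shift is undone. A sketch of that helper under the same shapes (the name window_reverse is mine, not in the graph):

    import torch

    def window_reverse(windows, window_size, H, W):
        # windows: (num_windows*B, ws, ws, C)  ->  (B, H, W, C)
        B = windows.shape[0] // ((H // window_size) * (W // window_size))
        x = windows.view(B, H // window_size, W // window_size,
                         window_size, window_size, -1)
        x = x.permute(0, 1, 3, 2, 4, 5).contiguous()
        return x.view(B, H, W, -1)

    # e.g. window_reverse(torch.randn(36, 8, 8, 192), 8, 48, 48).shape == (1, 48, 48, 192)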
prim::ListConstruct pnnx_18616 4 1 25887 1645 1885 25888 17358 torch.permute torch.permute_2895 2 1 x3.113 17355 17356 $input=x3.113 $dims=17355 #x3.113=(1,6,6,8,8,192)f32 #17356=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_277 1 1 17356 17357 memory_format=torch.contiguous_format $input=17356 #17356=(1,6,8,6,8,192)f32 #17357=(1,6,8,6,8,192)f32 prim::Constant pnnx_18618 0 1 25889 value=4 prim::Constant pnnx_18619 0 1 25890 value=4 prim::ListConstruct pnnx_18620 2 1 25889 25890 17360 prim::Constant pnnx_18621 0 1 25891 value=1 prim::Constant pnnx_18622 0 1 25892 value=2 prim::ListConstruct pnnx_18623 2 1 25891 25892 17361 Tensor.view Tensor.view_2001 2 1 17357 17358 shifted_x.93 $input=17357 $shape=17358 #17357=(1,6,8,6,8,192)f32 #shifted_x.93=(1,48,48,192)f32 aten::mul pnnx_18625 2 1 H1.1 W1.1 17363 aten::Int pnnx_18626 1 1 17363 17364 prim::ListConstruct pnnx_18627 3 1 17231 17364 17235 17365 prim::Constant pnnx_18629 0 1 17367 value=None prim::Constant pnnx_18630 0 1 25893 value=1 torch.roll torch.roll_2511 3 1 shifted_x.93 17360 17361 x4.113 $input=shifted_x.93 $shifts=17360 $dims=17361 #shifted_x.93=(1,48,48,192)f32 #x4.113=(1,48,48,192)f32 Tensor.view Tensor.view_2002 2 1 x4.113 17365 x5.93 $input=x4.113 $shape=17365 #x4.113=(1,48,48,192)f32 #x5.93=(1,2304,192)f32 aten::add pnnx_18631 3 1 17210 x5.93 25893 input.417 #17210=(1,2304,192)f32 #x5.93=(1,2304,192)f32 #input.417=(1,2304,192)f32 nn.LayerNorm layers_mmsa.3.residual_group.blocks.1.norm2 1 1 input.417 17369 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.417=(1,2304,192)f32 #17369=(1,2304,192)f32 nn.Linear layers_mmsa.3.residual_group.blocks.1.mlp.fc1 1 1 17369 17374 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #17369=(1,2304,192)f32 #17374=(1,2304,384)f32 nn.GELU layers_mmsa.3.residual_group.blocks.1.mlp.act 1 1 17374 17375 #17374=(1,2304,384)f32 #17375=(1,2304,384)f32 nn.Dropout layers_mmsa.3.residual_group.blocks.1.mlp.drop 1 1 17375 17376 #17375=(1,2304,384)f32 #17376=(1,2304,384)f32 nn.Linear layers_mmsa.3.residual_group.blocks.1.mlp.fc2 1 1 17376 17377 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #17376=(1,2304,384)f32 #17377=(1,2304,192)f32 nn.Dropout layers_mmsa.3.residual_group.blocks.1.mlp.drop 1 1 17377 17378 #17377=(1,2304,192)f32 #17378=(1,2304,192)f32 prim::Constant pnnx_18632 0 1 17379 value=None prim::Constant pnnx_18633 0 1 25894 value=1 aten::add pnnx_18634 3 1 input.417 17378 25894 17380 #input.417=(1,2304,192)f32 #17378=(1,2304,192)f32 #17380=(1,2304,192)f32 prim::Constant pnnx_18635 0 1 17381 value=trunc prim::Constant pnnx_18636 0 1 17382 value=8 prim::Constant pnnx_18637 0 1 17383 value=0 prim::Constant pnnx_18638 0 1 17384 value=2 prim::Constant pnnx_18639 0 1 17385 value=1 prim::Constant pnnx_18640 0 1 17386 value=3 prim::Constant pnnx_18641 0 1 17387 value=8 prim::Constant pnnx_18642 0 1 17388 value=4 prim::Constant pnnx_18643 0 1 17389 value=5 prim::Constant pnnx_18644 0 1 17390 value=-1 prim::Constant pnnx_18645 0 1 17391 value=64 aten::size pnnx_18646 2 1 17380 17383 17397 #17380=(1,2304,192)f32 prim::NumToTensor pnnx_18647 1 1 17397 B.223 aten::Int pnnx_18648 1 1 B.223 17399 aten::Int pnnx_18649 1 1 B.223 17400 aten::size pnnx_18650 2 1 17380 17384 17401 #17380=(1,2304,192)f32 prim::NumToTensor pnnx_18651 1 1 17401 C.379 aten::Int pnnx_18652 1 1 C.379 17403 aten::Int pnnx_18653 1 1 C.379 17404 aten::Int pnnx_18654 1 1 C.379 17405 aten::Int pnnx_18655 1 1 C.379 17406 
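Between attention steps, the trace repeats the same pre-norm residual pattern: the window-attention output is added to the block input, then a LayerNorm feeds a two-layer MLP (192 -> 384 -> 192) with GELU whose output is added back again. A compact sketch of that tail, assuming the 2x expansion visible in the fc1/fc2 shapes (class and attribute names are illustrative):

    import torch
    import torch.nn as nn

    class BlockTail(nn.Module):
        # residual + MLP part that follows window attention in every block here
        def __init__(self, dim=192, mlp_ratio=2.0, drop=0.0):
            super().__init__()
            self.norm2 = nn.LayerNorm(dim, eps=1e-5)
            self.mlp = nn.Sequential(
                nn.Linear(dim, int(dim * mlp_ratio)), nn.GELU(), nn.Dropout(drop),
                nn.Linear(int(dim * mlp_ratio), dim), nn.Dropout(drop))

        def forward(self, shortcut, attn_out):
            # shortcut, attn_out: (B, H*W, C) = (1, 2304, 192) in this trace
            x = shortcut + attn_out
            return x + self.mlp(self.norm2(x))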
nn.LayerNorm layers_mmsa.3.residual_group.blocks.2.norm1 1 1 17380 17407 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #17380=(1,2304,192)f32 #17407=(1,2304,192)f32 prim::ListConstruct pnnx_18656 4 1 17400 1642 1882 17406 17408 prim::Constant pnnx_18658 0 1 25895 value=0 Tensor.view Tensor.view_2003 2 1 17407 17408 x.187 $input=17407 $shape=17408 #17407=(1,2304,192)f32 #x.187=(1,48,48,192)f32 aten::size pnnx_18659 2 1 x.187 25895 17410 #x.187=(1,48,48,192)f32 prim::NumToTensor pnnx_18660 1 1 17410 B0.115 aten::Int pnnx_18661 1 1 B0.115 17412 aten::size pnnx_18662 2 1 x.187 17385 17413 #x.187=(1,48,48,192)f32 prim::NumToTensor pnnx_18663 1 1 17413 17414 prim::Constant pnnx_18664 0 1 25896 value=2 aten::size pnnx_18665 2 1 x.187 25896 17415 #x.187=(1,48,48,192)f32 prim::NumToTensor pnnx_18666 1 1 17415 17416 aten::size pnnx_18667 2 1 x.187 17386 17417 #x.187=(1,48,48,192)f32 prim::NumToTensor pnnx_18668 1 1 17417 C0.115 aten::Int pnnx_18669 1 1 C0.115 17419 aten::Int pnnx_18670 1 1 C0.115 17420 aten::div pnnx_18671 3 1 17414 17382 17381 17421 aten::Int pnnx_18672 1 1 17421 17422 prim::Constant pnnx_18673 0 1 25897 value=8 prim::Constant pnnx_18674 0 1 25898 value=trunc aten::div pnnx_18675 3 1 17416 25897 25898 17423 aten::Int pnnx_18676 1 1 17423 17424 prim::Constant pnnx_18677 0 1 25899 value=8 prim::ListConstruct pnnx_18678 6 1 17412 17422 17387 17424 25899 17420 17425 prim::Constant pnnx_18680 0 1 25900 value=0 prim::Constant pnnx_18681 0 1 25901 value=1 prim::Constant pnnx_18682 0 1 25902 value=3 prim::Constant pnnx_18683 0 1 25903 value=2 prim::ListConstruct pnnx_18684 6 1 25900 25901 25902 25903 17388 17389 17427 Tensor.view Tensor.view_2004 2 1 x.187 17425 x0.115 $input=x.187 $shape=17425 #x.187=(1,48,48,192)f32 #x0.115=(1,6,8,6,8,192)f32 prim::Constant pnnx_18688 0 1 25905 value=8 prim::Constant pnnx_18689 0 1 25906 value=8 prim::ListConstruct pnnx_18690 4 1 17390 25905 25906 17419 17430 torch.permute torch.permute_2896 2 1 x0.115 17427 17428 $input=x0.115 $dims=17427 #x0.115=(1,6,8,6,8,192)f32 #17428=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_278 1 1 17428 17429 memory_format=torch.contiguous_format $input=17428 #17428=(1,6,6,8,8,192)f32 #17429=(1,6,6,8,8,192)f32 prim::Constant pnnx_18692 0 1 25907 value=-1 prim::ListConstruct pnnx_18693 3 1 25907 17391 17405 17432 prim::Constant pnnx_18695 0 1 17434 value=1.767767e-01 prim::Constant pnnx_18696 0 1 17435 value=trunc prim::Constant pnnx_18697 0 1 17436 value=6 prim::Constant pnnx_18698 0 1 17437 value=0 prim::Constant pnnx_18699 0 1 17438 value=1 prim::Constant pnnx_18700 0 1 17439 value=2 prim::Constant pnnx_18701 0 1 17440 value=3 prim::Constant pnnx_18702 0 1 17441 value=6 prim::Constant pnnx_18703 0 1 17442 value=4 prim::Constant pnnx_18704 0 1 17443 value=-2 prim::Constant pnnx_18705 0 1 17444 value=-1 prim::Constant pnnx_18706 0 1 17445 value=64 pnnx.Attribute layers_mmsa.3.residual_group.blocks.2.attn 0 1 relative_position_bias_table.187 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.187=(225,6)f32 pnnx.Attribute layers_mmsa.3.residual_group.blocks.2.attn 0 1 relative_position_index.187 @relative_position_index=(64,64)i64 #relative_position_index.187=(64,64)i64 Tensor.view Tensor.view_2005 2 1 17429 17430 x_windows.187 $input=17429 $shape=17430 #17429=(1,6,6,8,8,192)f32 #x_windows.187=(36,8,8,192)f32 Tensor.view Tensor.view_2006 2 1 x_windows.187 17432 x1.115 $input=x_windows.187 $shape=17432 #x_windows.187=(36,8,8,192)f32 
#x1.115=(36,64,192)f32 aten::size pnnx_18707 2 1 x1.115 17437 17453 #x1.115=(36,64,192)f32 prim::NumToTensor pnnx_18708 1 1 17453 B_.187 aten::Int pnnx_18709 1 1 B_.187 17455 aten::Int pnnx_18710 1 1 B_.187 17456 aten::size pnnx_18711 2 1 x1.115 17438 17457 #x1.115=(36,64,192)f32 prim::NumToTensor pnnx_18712 1 1 17457 N.187 aten::Int pnnx_18713 1 1 N.187 17459 aten::Int pnnx_18714 1 1 N.187 17460 aten::size pnnx_18715 2 1 x1.115 17439 17461 #x1.115=(36,64,192)f32 prim::NumToTensor pnnx_18716 1 1 17461 C.381 aten::Int pnnx_18717 1 1 C.381 17463 nn.Linear layers_mmsa.3.residual_group.blocks.2.attn.qkv 1 1 x1.115 17464 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.115=(36,64,192)f32 #17464=(36,64,576)f32 aten::div pnnx_18718 3 1 C.381 17436 17435 17465 aten::Int pnnx_18719 1 1 17465 17466 prim::ListConstruct pnnx_18720 5 1 17456 17460 17440 17441 17466 17467 prim::Constant pnnx_18722 0 1 25908 value=2 prim::Constant pnnx_18723 0 1 25909 value=0 prim::Constant pnnx_18724 0 1 25910 value=3 prim::Constant pnnx_18725 0 1 25911 value=1 prim::ListConstruct pnnx_18726 5 1 25908 25909 25910 25911 17442 17469 Tensor.reshape Tensor.reshape_618 2 1 17464 17467 17468 $input=17464 $shape=17467 #17464=(36,64,576)f32 #17468=(36,64,3,6,32)f32 prim::Constant pnnx_18728 0 1 25912 value=0 prim::Constant pnnx_18729 0 1 25913 value=0 prim::Constant pnnx_18731 0 1 25914 value=0 prim::Constant pnnx_18732 0 1 25915 value=1 prim::Constant pnnx_18734 0 1 25916 value=0 prim::Constant pnnx_18735 0 1 25917 value=2 torch.permute torch.permute_2897 2 1 17468 17469 qkv0.115 $input=17468 $dims=17469 #17468=(36,64,3,6,32)f32 #qkv0.115=(3,36,6,64,32)f32 Tensor.select Tensor.select_926 3 1 qkv0.115 25912 25913 q.187 $input=qkv0.115 $dim=25912 $index=25913 #qkv0.115=(3,36,6,64,32)f32 #q.187=(36,6,64,32)f32 aten::mul pnnx_18737 2 1 q.187 17434 q0.115 #q.187=(36,6,64,32)f32 #q0.115=(36,6,64,32)f32 Tensor.select Tensor.select_927 3 1 qkv0.115 25914 25915 k.187 $input=qkv0.115 $dim=25914 $index=25915 #qkv0.115=(3,36,6,64,32)f32 #k.187=(36,6,64,32)f32 prim::Constant pnnx_18740 0 1 25918 value=-1 prim::ListConstruct pnnx_18741 1 1 25918 17477 Tensor.view Tensor.view_2007 2 1 relative_position_index.187 17477 17478 $input=relative_position_index.187 $shape=17477 #relative_position_index.187=(64,64)i64 #17478=(4096)i64 prim::ListConstruct pnnx_18743 1 1 17478 17479 #17478=(4096)i64 prim::Constant pnnx_18745 0 1 25919 value=64 prim::Constant pnnx_18746 0 1 25920 value=-1 prim::ListConstruct pnnx_18747 3 1 17445 25919 25920 17481 Tensor.index Tensor.index_418 2 1 relative_position_bias_table.187 17479 17480 $input=relative_position_bias_table.187 $expr=17479 #relative_position_bias_table.187=(225,6)f32 #17480=(4096,6)f32 prim::Constant pnnx_18749 0 1 25921 value=2 prim::Constant pnnx_18750 0 1 25922 value=0 prim::Constant pnnx_18751 0 1 25923 value=1 prim::ListConstruct pnnx_18752 3 1 25921 25922 25923 17483 Tensor.view Tensor.view_2008 2 1 17480 17481 relative_position_bias.187 $input=17480 $shape=17481 #17480=(4096,6)f32 #relative_position_bias.187=(64,64,6)f32 prim::Constant pnnx_18756 0 1 25925 value=0 torch.permute torch.permute_2898 2 1 relative_position_bias.187 17483 17484 $input=relative_position_bias.187 $dims=17483 #relative_position_bias.187=(64,64,6)f32 #17484=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_279 1 1 17484 relative_position_bias0.115 memory_format=torch.contiguous_format $input=17484 #17484=(6,64,64)f32 #relative_position_bias0.115=(6,64,64)f32 prim::Constant pnnx_18758 0 1 
25926 value=1 torch.transpose torch.transpose_3179 3 1 k.187 17443 17444 17475 $input=k.187 $dim0=17443 $dim1=17444 #k.187=(36,6,64,32)f32 #17475=(36,6,32,64)f32 torch.matmul torch.matmul_2388 2 1 q0.115 17475 attn.375 $input=q0.115 $other=17475 #q0.115=(36,6,64,32)f32 #17475=(36,6,32,64)f32 #attn.375=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3402 2 1 relative_position_bias0.115 25925 17486 $input=relative_position_bias0.115 $dim=25925 #relative_position_bias0.115=(6,64,64)f32 #17486=(1,6,64,64)f32 aten::add pnnx_18759 3 1 attn.375 17486 25926 input.419 #attn.375=(36,6,64,64)f32 #17486=(1,6,64,64)f32 #input.419=(36,6,64,64)f32 nn.Softmax layers_mmsa.3.residual_group.blocks.2.attn.softmax 1 1 input.419 17488 dim=-1 #input.419=(36,6,64,64)f32 #17488=(36,6,64,64)f32 nn.Dropout layers_mmsa.3.residual_group.blocks.2.attn.attn_drop 1 1 17488 17489 #17488=(36,6,64,64)f32 #17489=(36,6,64,64)f32 Tensor.select Tensor.select_928 3 1 qkv0.115 25916 25917 v.187 $input=qkv0.115 $dim=25916 $index=25917 #qkv0.115=(3,36,6,64,32)f32 #v.187=(36,6,64,32)f32 prim::Constant pnnx_18761 0 1 25927 value=1 prim::Constant pnnx_18762 0 1 25928 value=2 torch.matmul torch.matmul_2389 2 1 17489 v.187 17490 $input=17489 $other=v.187 #17489=(36,6,64,64)f32 #v.187=(36,6,64,32)f32 #17490=(36,6,64,32)f32 prim::ListConstruct pnnx_18764 3 1 17455 17459 17463 17492 torch.transpose torch.transpose_3180 3 1 17490 25927 25928 17491 $input=17490 $dim0=25927 $dim1=25928 #17490=(36,6,64,32)f32 #17491=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_619 2 1 17491 17492 input0.119 $input=17491 $shape=17492 #17491=(36,64,6,32)f32 #input0.119=(36,64,192)f32 nn.Linear layers_mmsa.3.residual_group.blocks.2.attn.proj 1 1 input0.119 17494 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.119=(36,64,192)f32 #17494=(36,64,192)f32 nn.Dropout layers_mmsa.3.residual_group.blocks.2.attn.proj_drop 1 1 17494 17495 #17494=(36,64,192)f32 #17495=(36,64,192)f32 prim::Constant pnnx_18766 0 1 25929 value=-1 prim::Constant pnnx_18767 0 1 25930 value=8 prim::Constant pnnx_18768 0 1 25931 value=8 prim::ListConstruct pnnx_18769 4 1 25929 25930 25931 17404 17496 prim::Constant pnnx_18771 0 1 25932 value=8 prim::Constant pnnx_18772 0 1 25933 value=trunc aten::div pnnx_18773 3 1 H1.1 25932 25933 17498 aten::Int pnnx_18774 1 1 17498 17499 prim::Constant pnnx_18775 0 1 25934 value=8 prim::Constant pnnx_18776 0 1 25935 value=trunc aten::div pnnx_18777 3 1 W1.1 25934 25935 17500 aten::Int pnnx_18778 1 1 17500 17501 prim::Constant pnnx_18779 0 1 25936 value=1 prim::Constant pnnx_18780 0 1 25937 value=8 prim::Constant pnnx_18781 0 1 25938 value=8 prim::Constant pnnx_18782 0 1 25939 value=-1 prim::ListConstruct pnnx_18783 6 1 25936 17499 17501 25937 25938 25939 17502 prim::Constant pnnx_18785 0 1 25940 value=0 prim::Constant pnnx_18786 0 1 25941 value=1 prim::Constant pnnx_18787 0 1 25942 value=3 prim::Constant pnnx_18788 0 1 25943 value=2 prim::Constant pnnx_18789 0 1 25944 value=4 prim::Constant pnnx_18790 0 1 25945 value=5 prim::ListConstruct pnnx_18791 6 1 25940 25941 25942 25943 25944 25945 17504 Tensor.view Tensor.view_2009 2 1 17495 17496 windows.187 $input=17495 $shape=17496 #17495=(36,64,192)f32 #windows.187=(36,8,8,192)f32 Tensor.view Tensor.view_2010 2 1 windows.187 17502 x2.115 $input=windows.187 $shape=17502 #windows.187=(36,8,8,192)f32 #x2.115=(1,6,6,8,8,192)f32 prim::Constant pnnx_18795 0 1 25947 value=1 prim::Constant pnnx_18796 0 1 25948 value=-1 prim::ListConstruct pnnx_18797 4 1 25947 1639 1879 25948 17507 
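Each attention module carries two exported attributes, relative_position_bias_table=(225,6) and relative_position_index=(64,64): 225 = (2*8-1)^2 possible relative offsets inside an 8x8 window, with one learned value per head. The view/index/view/permute/unsqueeze sequence around Tensor.index_418 gathers them into the (1,6,64,64) bias added to the attention logits. Roughly, in PyTorch (helper name is mine):

    import torch

    def gather_relative_bias(table, index, window_size=8, num_heads=6):
        # table: ((2*ws-1)**2, heads) = (225, 6); index: (ws*ws, ws*ws) = (64, 64), int64
        n = window_size * window_size
        bias = table[index.view(-1)]               # (4096, 6)
        bias = bias.view(n, n, num_heads)          # (64, 64, 6)
        bias = bias.permute(2, 0, 1).contiguous()  # (6, 64, 64)
        return bias.unsqueeze(0)                   # broadcasts over the 36 windows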
torch.permute torch.permute_2899 2 1 x2.115 17504 17505 $input=x2.115 $dims=17504 #x2.115=(1,6,6,8,8,192)f32 #17505=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_280 1 1 17505 17506 memory_format=torch.contiguous_format $input=17505 #17505=(1,6,8,6,8,192)f32 #17506=(1,6,8,6,8,192)f32 aten::mul pnnx_18799 2 1 H1.1 W1.1 17509 aten::Int pnnx_18800 1 1 17509 17510 prim::ListConstruct pnnx_18801 3 1 17399 17510 17403 17511 prim::Constant pnnx_18803 0 1 17513 value=None prim::Constant pnnx_18804 0 1 25949 value=1 Tensor.view Tensor.view_2011 2 1 17506 17507 x3.115 $input=17506 $shape=17507 #17506=(1,6,8,6,8,192)f32 #x3.115=(1,48,48,192)f32 Tensor.view Tensor.view_2012 2 1 x3.115 17511 x4.115 $input=x3.115 $shape=17511 #x3.115=(1,48,48,192)f32 #x4.115=(1,2304,192)f32 aten::add pnnx_18805 3 1 17380 x4.115 25949 input.421 #17380=(1,2304,192)f32 #x4.115=(1,2304,192)f32 #input.421=(1,2304,192)f32 nn.LayerNorm layers_mmsa.3.residual_group.blocks.2.norm2 1 1 input.421 17515 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.421=(1,2304,192)f32 #17515=(1,2304,192)f32 nn.Linear layers_mmsa.3.residual_group.blocks.2.mlp.fc1 1 1 17515 17520 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #17515=(1,2304,192)f32 #17520=(1,2304,384)f32 nn.GELU layers_mmsa.3.residual_group.blocks.2.mlp.act 1 1 17520 17521 #17520=(1,2304,384)f32 #17521=(1,2304,384)f32 nn.Dropout layers_mmsa.3.residual_group.blocks.2.mlp.drop 1 1 17521 17522 #17521=(1,2304,384)f32 #17522=(1,2304,384)f32 nn.Linear layers_mmsa.3.residual_group.blocks.2.mlp.fc2 1 1 17522 17523 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #17522=(1,2304,384)f32 #17523=(1,2304,192)f32 nn.Dropout layers_mmsa.3.residual_group.blocks.2.mlp.drop 1 1 17523 17524 #17523=(1,2304,192)f32 #17524=(1,2304,192)f32 prim::Constant pnnx_18806 0 1 17525 value=None prim::Constant pnnx_18807 0 1 25950 value=1 aten::add pnnx_18808 3 1 input.421 17524 25950 17526 #input.421=(1,2304,192)f32 #17524=(1,2304,192)f32 #17526=(1,2304,192)f32 prim::Constant pnnx_18809 0 1 17527 value=trunc prim::Constant pnnx_18810 0 1 17528 value=8 prim::Constant pnnx_18811 0 1 17529 value=0 prim::Constant pnnx_18812 0 1 17530 value=2 prim::Constant pnnx_18813 0 1 17531 value=-4 prim::Constant pnnx_18814 0 1 17532 value=1 prim::Constant pnnx_18815 0 1 17533 value=3 prim::Constant pnnx_18816 0 1 17534 value=8 prim::Constant pnnx_18817 0 1 17535 value=4 prim::Constant pnnx_18818 0 1 17536 value=5 prim::Constant pnnx_18819 0 1 17537 value=-1 prim::Constant pnnx_18820 0 1 17538 value=64 pnnx.Attribute layers_mmsa.3.residual_group.blocks.3 0 1 attn_mask.95 @attn_mask=(36,64,64)f32 #attn_mask.95=(36,64,64)f32 aten::size pnnx_18821 2 1 17526 17529 17545 #17526=(1,2304,192)f32 prim::NumToTensor pnnx_18822 1 1 17545 B.225 aten::Int pnnx_18823 1 1 B.225 17547 aten::Int pnnx_18824 1 1 B.225 17548 aten::size pnnx_18825 2 1 17526 17530 17549 #17526=(1,2304,192)f32 prim::NumToTensor pnnx_18826 1 1 17549 C.383 aten::Int pnnx_18827 1 1 C.383 17551 aten::Int pnnx_18828 1 1 C.383 17552 aten::Int pnnx_18829 1 1 C.383 17553 aten::Int pnnx_18830 1 1 C.383 17554 nn.LayerNorm layers_mmsa.3.residual_group.blocks.3.norm1 1 1 17526 17555 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #17526=(1,2304,192)f32 #17555=(1,2304,192)f32 prim::ListConstruct pnnx_18831 4 1 17548 1636 1876 17554 17556 prim::Constant pnnx_18833 0 1 25951 value=-4 prim::ListConstruct 
pnnx_18834 2 1 17531 25951 17558 prim::Constant pnnx_18835 0 1 25952 value=2 prim::ListConstruct pnnx_18836 2 1 17532 25952 17559 Tensor.view Tensor.view_2013 2 1 17555 17556 x.189 $input=17555 $shape=17556 #17555=(1,2304,192)f32 #x.189=(1,48,48,192)f32 prim::Constant pnnx_18838 0 1 25953 value=0 torch.roll torch.roll_2512 3 1 x.189 17558 17559 x0.117 $input=x.189 $shifts=17558 $dims=17559 #x.189=(1,48,48,192)f32 #x0.117=(1,48,48,192)f32 aten::size pnnx_18839 2 1 x0.117 25953 17561 #x0.117=(1,48,48,192)f32 prim::NumToTensor pnnx_18840 1 1 17561 B0.117 aten::Int pnnx_18841 1 1 B0.117 17563 prim::Constant pnnx_18842 0 1 25954 value=1 aten::size pnnx_18843 2 1 x0.117 25954 17564 #x0.117=(1,48,48,192)f32 prim::NumToTensor pnnx_18844 1 1 17564 17565 prim::Constant pnnx_18845 0 1 25955 value=2 aten::size pnnx_18846 2 1 x0.117 25955 17566 #x0.117=(1,48,48,192)f32 prim::NumToTensor pnnx_18847 1 1 17566 17567 aten::size pnnx_18848 2 1 x0.117 17533 17568 #x0.117=(1,48,48,192)f32 prim::NumToTensor pnnx_18849 1 1 17568 C0.117 aten::Int pnnx_18850 1 1 C0.117 17570 aten::Int pnnx_18851 1 1 C0.117 17571 aten::div pnnx_18852 3 1 17565 17528 17527 17572 aten::Int pnnx_18853 1 1 17572 17573 prim::Constant pnnx_18854 0 1 25956 value=8 prim::Constant pnnx_18855 0 1 25957 value=trunc aten::div pnnx_18856 3 1 17567 25956 25957 17574 aten::Int pnnx_18857 1 1 17574 17575 prim::Constant pnnx_18858 0 1 25958 value=8 prim::ListConstruct pnnx_18859 6 1 17563 17573 17534 17575 25958 17571 17576 prim::Constant pnnx_18861 0 1 25959 value=0 prim::Constant pnnx_18862 0 1 25960 value=1 prim::Constant pnnx_18863 0 1 25961 value=3 prim::Constant pnnx_18864 0 1 25962 value=2 prim::ListConstruct pnnx_18865 6 1 25959 25960 25961 25962 17535 17536 17578 Tensor.view Tensor.view_2014 2 1 x0.117 17576 x1.117 $input=x0.117 $shape=17576 #x0.117=(1,48,48,192)f32 #x1.117=(1,6,8,6,8,192)f32 prim::Constant pnnx_18869 0 1 25964 value=8 prim::Constant pnnx_18870 0 1 25965 value=8 prim::ListConstruct pnnx_18871 4 1 17537 25964 25965 17570 17581 torch.permute torch.permute_2900 2 1 x1.117 17578 17579 $input=x1.117 $dims=17578 #x1.117=(1,6,8,6,8,192)f32 #17579=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_281 1 1 17579 17580 memory_format=torch.contiguous_format $input=17579 #17579=(1,6,6,8,8,192)f32 #17580=(1,6,6,8,8,192)f32 prim::Constant pnnx_18873 0 1 25966 value=-1 prim::ListConstruct pnnx_18874 3 1 25966 17538 17553 17583 prim::Constant pnnx_18876 0 1 17585 value=1.767767e-01 prim::Constant pnnx_18877 0 1 17586 value=trunc prim::Constant pnnx_18878 0 1 17587 value=6 prim::Constant pnnx_18879 0 1 17588 value=0 prim::Constant pnnx_18880 0 1 17589 value=1 prim::Constant pnnx_18881 0 1 17590 value=2 prim::Constant pnnx_18882 0 1 17591 value=3 prim::Constant pnnx_18883 0 1 17592 value=6 prim::Constant pnnx_18884 0 1 17593 value=4 prim::Constant pnnx_18885 0 1 17594 value=-2 prim::Constant pnnx_18886 0 1 17595 value=-1 prim::Constant pnnx_18887 0 1 17596 value=64 pnnx.Attribute layers_mmsa.3.residual_group.blocks.3.attn 0 1 relative_position_bias_table.189 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.189=(225,6)f32 pnnx.Attribute layers_mmsa.3.residual_group.blocks.3.attn 0 1 relative_position_index.189 @relative_position_index=(64,64)i64 #relative_position_index.189=(64,64)i64 Tensor.view Tensor.view_2015 2 1 17580 17581 x_windows.189 $input=17580 $shape=17581 #17580=(1,6,6,8,8,192)f32 #x_windows.189=(36,8,8,192)f32 Tensor.view Tensor.view_2016 2 1 x_windows.189 17583 x2.117 $input=x_windows.189 
$shape=17583 #x_windows.189=(36,8,8,192)f32 #x2.117=(36,64,192)f32 aten::size pnnx_18888 2 1 x2.117 17588 17604 #x2.117=(36,64,192)f32 prim::NumToTensor pnnx_18889 1 1 17604 B_.189 aten::Int pnnx_18890 1 1 B_.189 17606 aten::Int pnnx_18891 1 1 B_.189 17607 aten::size pnnx_18892 2 1 x2.117 17589 17608 #x2.117=(36,64,192)f32 prim::NumToTensor pnnx_18893 1 1 17608 N.189 aten::Int pnnx_18894 1 1 N.189 17610 aten::Int pnnx_18895 1 1 N.189 17611 aten::Int pnnx_18896 1 1 N.189 17612 aten::Int pnnx_18897 1 1 N.189 17613 aten::Int pnnx_18898 1 1 N.189 17614 aten::Int pnnx_18899 1 1 N.189 17615 aten::size pnnx_18900 2 1 x2.117 17590 17616 #x2.117=(36,64,192)f32 prim::NumToTensor pnnx_18901 1 1 17616 C.385 aten::Int pnnx_18902 1 1 C.385 17618 nn.Linear layers_mmsa.3.residual_group.blocks.3.attn.qkv 1 1 x2.117 17619 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.117=(36,64,192)f32 #17619=(36,64,576)f32 aten::div pnnx_18903 3 1 C.385 17587 17586 17620 aten::Int pnnx_18904 1 1 17620 17621 prim::ListConstruct pnnx_18905 5 1 17607 17615 17591 17592 17621 17622 prim::Constant pnnx_18907 0 1 25967 value=2 prim::Constant pnnx_18908 0 1 25968 value=0 prim::Constant pnnx_18909 0 1 25969 value=3 prim::Constant pnnx_18910 0 1 25970 value=1 prim::ListConstruct pnnx_18911 5 1 25967 25968 25969 25970 17593 17624 Tensor.reshape Tensor.reshape_620 2 1 17619 17622 17623 $input=17619 $shape=17622 #17619=(36,64,576)f32 #17623=(36,64,3,6,32)f32 prim::Constant pnnx_18913 0 1 25971 value=0 prim::Constant pnnx_18914 0 1 25972 value=0 prim::Constant pnnx_18916 0 1 25973 value=0 prim::Constant pnnx_18917 0 1 25974 value=1 prim::Constant pnnx_18919 0 1 25975 value=0 prim::Constant pnnx_18920 0 1 25976 value=2 torch.permute torch.permute_2901 2 1 17623 17624 qkv0.117 $input=17623 $dims=17624 #17623=(36,64,3,6,32)f32 #qkv0.117=(3,36,6,64,32)f32 Tensor.select Tensor.select_929 3 1 qkv0.117 25971 25972 q.189 $input=qkv0.117 $dim=25971 $index=25972 #qkv0.117=(3,36,6,64,32)f32 #q.189=(36,6,64,32)f32 aten::mul pnnx_18922 2 1 q.189 17585 q0.117 #q.189=(36,6,64,32)f32 #q0.117=(36,6,64,32)f32 Tensor.select Tensor.select_930 3 1 qkv0.117 25973 25974 k.189 $input=qkv0.117 $dim=25973 $index=25974 #qkv0.117=(3,36,6,64,32)f32 #k.189=(36,6,64,32)f32 prim::Constant pnnx_18925 0 1 25977 value=-1 prim::ListConstruct pnnx_18926 1 1 25977 17632 Tensor.view Tensor.view_2017 2 1 relative_position_index.189 17632 17633 $input=relative_position_index.189 $shape=17632 #relative_position_index.189=(64,64)i64 #17633=(4096)i64 prim::ListConstruct pnnx_18928 1 1 17633 17634 #17633=(4096)i64 prim::Constant pnnx_18930 0 1 25978 value=64 prim::Constant pnnx_18931 0 1 25979 value=-1 prim::ListConstruct pnnx_18932 3 1 17596 25978 25979 17636 Tensor.index Tensor.index_419 2 1 relative_position_bias_table.189 17634 17635 $input=relative_position_bias_table.189 $expr=17634 #relative_position_bias_table.189=(225,6)f32 #17635=(4096,6)f32 prim::Constant pnnx_18934 0 1 25980 value=2 prim::Constant pnnx_18935 0 1 25981 value=0 prim::Constant pnnx_18936 0 1 25982 value=1 prim::ListConstruct pnnx_18937 3 1 25980 25981 25982 17638 Tensor.view Tensor.view_2018 2 1 17635 17636 relative_position_bias.189 $input=17635 $shape=17636 #17635=(4096,6)f32 #relative_position_bias.189=(64,64,6)f32 prim::Constant pnnx_18941 0 1 25984 value=0 torch.permute torch.permute_2902 2 1 relative_position_bias.189 17638 17639 $input=relative_position_bias.189 $dims=17638 #relative_position_bias.189=(64,64,6)f32 #17639=(6,64,64)f32 Tensor.contiguous 
Tensor.contiguous_282 1 1 17639 relative_position_bias0.117 memory_format=torch.contiguous_format $input=17639 #17639=(6,64,64)f32 #relative_position_bias0.117=(6,64,64)f32 prim::Constant pnnx_18943 0 1 25985 value=1 torch.transpose torch.transpose_3181 3 1 k.189 17594 17595 17630 $input=k.189 $dim0=17594 $dim1=17595 #k.189=(36,6,64,32)f32 #17630=(36,6,32,64)f32 torch.matmul torch.matmul_2390 2 1 q0.117 17630 attn.379 $input=q0.117 $other=17630 #q0.117=(36,6,64,32)f32 #17630=(36,6,32,64)f32 #attn.379=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3403 2 1 relative_position_bias0.117 25984 17641 $input=relative_position_bias0.117 $dim=25984 #relative_position_bias0.117=(6,64,64)f32 #17641=(1,6,64,64)f32 aten::add pnnx_18944 3 1 attn.379 17641 25985 attn0.59 #attn.379=(36,6,64,64)f32 #17641=(1,6,64,64)f32 #attn0.59=(36,6,64,64)f32 prim::Constant pnnx_18945 0 1 25986 value=0 aten::size pnnx_18946 2 1 attn_mask.95 25986 17643 #attn_mask.95=(36,64,64)f32 prim::NumToTensor pnnx_18947 1 1 17643 other.95 aten::Int pnnx_18948 1 1 other.95 17645 prim::Constant pnnx_18949 0 1 25987 value=trunc aten::div pnnx_18950 3 1 B_.189 other.95 25987 17646 aten::Int pnnx_18951 1 1 17646 17647 prim::Constant pnnx_18952 0 1 25988 value=6 prim::ListConstruct pnnx_18953 5 1 17647 17645 25988 17614 17613 17648 prim::Constant pnnx_18955 0 1 25989 value=1 prim::Constant pnnx_18957 0 1 25990 value=0 prim::Constant pnnx_18959 0 1 25991 value=1 Tensor.view Tensor.view_2019 2 1 attn0.59 17648 17649 $input=attn0.59 $shape=17648 #attn0.59=(36,6,64,64)f32 #17649=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3404 2 1 attn_mask.95 25989 17650 $input=attn_mask.95 $dim=25989 #attn_mask.95=(36,64,64)f32 #17650=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3405 2 1 17650 25990 17651 $input=17650 $dim=25990 #17650=(36,1,64,64)f32 #17651=(1,36,1,64,64)f32 aten::add pnnx_18960 3 1 17649 17651 25991 attn1.59 #17649=(1,36,6,64,64)f32 #17651=(1,36,1,64,64)f32 #attn1.59=(1,36,6,64,64)f32 prim::Constant pnnx_18961 0 1 25992 value=-1 prim::Constant pnnx_18962 0 1 25993 value=6 prim::ListConstruct pnnx_18963 4 1 25992 25993 17612 17611 17653 Tensor.view Tensor.view_2020 2 1 attn1.59 17653 input.423 $input=attn1.59 $shape=17653 #attn1.59=(1,36,6,64,64)f32 #input.423=(36,6,64,64)f32 nn.Softmax layers_mmsa.3.residual_group.blocks.3.attn.softmax 1 1 input.423 17655 dim=-1 #input.423=(36,6,64,64)f32 #17655=(36,6,64,64)f32 nn.Dropout layers_mmsa.3.residual_group.blocks.3.attn.attn_drop 1 1 17655 17656 #17655=(36,6,64,64)f32 #17656=(36,6,64,64)f32 Tensor.select Tensor.select_931 3 1 qkv0.117 25975 25976 v.189 $input=qkv0.117 $dim=25975 $index=25976 #qkv0.117=(3,36,6,64,32)f32 #v.189=(36,6,64,32)f32 prim::Constant pnnx_18966 0 1 25994 value=1 prim::Constant pnnx_18967 0 1 25995 value=2 torch.matmul torch.matmul_2391 2 1 17656 v.189 17657 $input=17656 $other=v.189 #17656=(36,6,64,64)f32 #v.189=(36,6,64,32)f32 #17657=(36,6,64,32)f32 prim::ListConstruct pnnx_18969 3 1 17606 17610 17618 17659 torch.transpose torch.transpose_3182 3 1 17657 25994 25995 17658 $input=17657 $dim0=25994 $dim1=25995 #17657=(36,6,64,32)f32 #17658=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_621 2 1 17658 17659 input0.121 $input=17658 $shape=17659 #17658=(36,64,6,32)f32 #input0.121=(36,64,192)f32 nn.Linear layers_mmsa.3.residual_group.blocks.3.attn.proj 1 1 input0.121 17661 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.121=(36,64,192)f32 #17661=(36,64,192)f32 nn.Dropout layers_mmsa.3.residual_group.blocks.3.attn.proj_drop 1 1 
17661 17662 #17661=(36,64,192)f32 #17662=(36,64,192)f32 prim::Constant pnnx_18971 0 1 25996 value=-1 prim::Constant pnnx_18972 0 1 25997 value=8 prim::Constant pnnx_18973 0 1 25998 value=8 prim::ListConstruct pnnx_18974 4 1 25996 25997 25998 17552 17663 prim::Constant pnnx_18976 0 1 25999 value=8 prim::Constant pnnx_18977 0 1 26000 value=trunc aten::div pnnx_18978 3 1 H1.1 25999 26000 17665 aten::Int pnnx_18979 1 1 17665 17666 prim::Constant pnnx_18980 0 1 26001 value=8 prim::Constant pnnx_18981 0 1 26002 value=trunc aten::div pnnx_18982 3 1 W1.1 26001 26002 17667 aten::Int pnnx_18983 1 1 17667 17668 prim::Constant pnnx_18984 0 1 26003 value=1 prim::Constant pnnx_18985 0 1 26004 value=8 prim::Constant pnnx_18986 0 1 26005 value=8 prim::Constant pnnx_18987 0 1 26006 value=-1 prim::ListConstruct pnnx_18988 6 1 26003 17666 17668 26004 26005 26006 17669 prim::Constant pnnx_18990 0 1 26007 value=0 prim::Constant pnnx_18991 0 1 26008 value=1 prim::Constant pnnx_18992 0 1 26009 value=3 prim::Constant pnnx_18993 0 1 26010 value=2 prim::Constant pnnx_18994 0 1 26011 value=4 prim::Constant pnnx_18995 0 1 26012 value=5 prim::ListConstruct pnnx_18996 6 1 26007 26008 26009 26010 26011 26012 17671 Tensor.view Tensor.view_2021 2 1 17662 17663 windows.189 $input=17662 $shape=17663 #17662=(36,64,192)f32 #windows.189=(36,8,8,192)f32 Tensor.view Tensor.view_2022 2 1 windows.189 17669 x3.117 $input=windows.189 $shape=17669 #windows.189=(36,8,8,192)f32 #x3.117=(1,6,6,8,8,192)f32 prim::Constant pnnx_19000 0 1 26014 value=1 prim::Constant pnnx_19001 0 1 26015 value=-1 prim::ListConstruct pnnx_19002 4 1 26014 1633 1873 26015 17674 torch.permute torch.permute_2903 2 1 x3.117 17671 17672 $input=x3.117 $dims=17671 #x3.117=(1,6,6,8,8,192)f32 #17672=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_283 1 1 17672 17673 memory_format=torch.contiguous_format $input=17672 #17672=(1,6,8,6,8,192)f32 #17673=(1,6,8,6,8,192)f32 prim::Constant pnnx_19004 0 1 26016 value=4 prim::Constant pnnx_19005 0 1 26017 value=4 prim::ListConstruct pnnx_19006 2 1 26016 26017 17676 prim::Constant pnnx_19007 0 1 26018 value=1 prim::Constant pnnx_19008 0 1 26019 value=2 prim::ListConstruct pnnx_19009 2 1 26018 26019 17677 Tensor.view Tensor.view_2023 2 1 17673 17674 shifted_x.95 $input=17673 $shape=17674 #17673=(1,6,8,6,8,192)f32 #shifted_x.95=(1,48,48,192)f32 aten::mul pnnx_19011 2 1 H1.1 W1.1 17679 aten::Int pnnx_19012 1 1 17679 17680 prim::ListConstruct pnnx_19013 3 1 17547 17680 17551 17681 prim::Constant pnnx_19015 0 1 17683 value=None prim::Constant pnnx_19016 0 1 26020 value=1 torch.roll torch.roll_2513 3 1 shifted_x.95 17676 17677 x4.117 $input=shifted_x.95 $shifts=17676 $dims=17677 #shifted_x.95=(1,48,48,192)f32 #x4.117=(1,48,48,192)f32 Tensor.view Tensor.view_2024 2 1 x4.117 17681 x5.95 $input=x4.117 $shape=17681 #x4.117=(1,48,48,192)f32 #x5.95=(1,2304,192)f32 aten::add pnnx_19017 3 1 17526 x5.95 26020 input.425 #17526=(1,2304,192)f32 #x5.95=(1,2304,192)f32 #input.425=(1,2304,192)f32 nn.LayerNorm layers_mmsa.3.residual_group.blocks.3.norm2 1 1 input.425 17685 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.425=(1,2304,192)f32 #17685=(1,2304,192)f32 nn.Linear layers_mmsa.3.residual_group.blocks.3.mlp.fc1 1 1 17685 17690 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #17685=(1,2304,192)f32 #17690=(1,2304,384)f32 nn.GELU layers_mmsa.3.residual_group.blocks.3.mlp.act 1 1 17690 17691 #17690=(1,2304,384)f32 #17691=(1,2304,384)f32 nn.Dropout 
layers_mmsa.3.residual_group.blocks.3.mlp.drop 1 1 17691 17692 #17691=(1,2304,384)f32 #17692=(1,2304,384)f32 nn.Linear layers_mmsa.3.residual_group.blocks.3.mlp.fc2 1 1 17692 17693 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #17692=(1,2304,384)f32 #17693=(1,2304,192)f32 nn.Dropout layers_mmsa.3.residual_group.blocks.3.mlp.drop 1 1 17693 17694 #17693=(1,2304,192)f32 #17694=(1,2304,192)f32 prim::Constant pnnx_19018 0 1 17695 value=None prim::Constant pnnx_19019 0 1 26021 value=1 aten::add pnnx_19020 3 1 input.425 17694 26021 17696 #input.425=(1,2304,192)f32 #17694=(1,2304,192)f32 #17696=(1,2304,192)f32 prim::Constant pnnx_19021 0 1 17697 value=trunc prim::Constant pnnx_19022 0 1 17698 value=8 prim::Constant pnnx_19023 0 1 17699 value=0 prim::Constant pnnx_19024 0 1 17700 value=2 prim::Constant pnnx_19025 0 1 17701 value=1 prim::Constant pnnx_19026 0 1 17702 value=3 prim::Constant pnnx_19027 0 1 17703 value=8 prim::Constant pnnx_19028 0 1 17704 value=4 prim::Constant pnnx_19029 0 1 17705 value=5 prim::Constant pnnx_19030 0 1 17706 value=-1 prim::Constant pnnx_19031 0 1 17707 value=64 aten::size pnnx_19032 2 1 17696 17699 17713 #17696=(1,2304,192)f32 prim::NumToTensor pnnx_19033 1 1 17713 B.227 aten::Int pnnx_19034 1 1 B.227 17715 aten::Int pnnx_19035 1 1 B.227 17716 aten::size pnnx_19036 2 1 17696 17700 17717 #17696=(1,2304,192)f32 prim::NumToTensor pnnx_19037 1 1 17717 C.387 aten::Int pnnx_19038 1 1 C.387 17719 aten::Int pnnx_19039 1 1 C.387 17720 aten::Int pnnx_19040 1 1 C.387 17721 aten::Int pnnx_19041 1 1 C.387 17722 nn.LayerNorm layers_mmsa.3.residual_group.blocks.4.norm1 1 1 17696 17723 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #17696=(1,2304,192)f32 #17723=(1,2304,192)f32 prim::ListConstruct pnnx_19042 4 1 17716 1630 1870 17722 17724 prim::Constant pnnx_19044 0 1 26022 value=0 Tensor.view Tensor.view_2025 2 1 17723 17724 x.191 $input=17723 $shape=17724 #17723=(1,2304,192)f32 #x.191=(1,48,48,192)f32 aten::size pnnx_19045 2 1 x.191 26022 17726 #x.191=(1,48,48,192)f32 prim::NumToTensor pnnx_19046 1 1 17726 B0.119 aten::Int pnnx_19047 1 1 B0.119 17728 aten::size pnnx_19048 2 1 x.191 17701 17729 #x.191=(1,48,48,192)f32 prim::NumToTensor pnnx_19049 1 1 17729 17730 prim::Constant pnnx_19050 0 1 26023 value=2 aten::size pnnx_19051 2 1 x.191 26023 17731 #x.191=(1,48,48,192)f32 prim::NumToTensor pnnx_19052 1 1 17731 17732 aten::size pnnx_19053 2 1 x.191 17702 17733 #x.191=(1,48,48,192)f32 prim::NumToTensor pnnx_19054 1 1 17733 C0.119 aten::Int pnnx_19055 1 1 C0.119 17735 aten::Int pnnx_19056 1 1 C0.119 17736 aten::div pnnx_19057 3 1 17730 17698 17697 17737 aten::Int pnnx_19058 1 1 17737 17738 prim::Constant pnnx_19059 0 1 26024 value=8 prim::Constant pnnx_19060 0 1 26025 value=trunc aten::div pnnx_19061 3 1 17732 26024 26025 17739 aten::Int pnnx_19062 1 1 17739 17740 prim::Constant pnnx_19063 0 1 26026 value=8 prim::ListConstruct pnnx_19064 6 1 17728 17738 17703 17740 26026 17736 17741 prim::Constant pnnx_19066 0 1 26027 value=0 prim::Constant pnnx_19067 0 1 26028 value=1 prim::Constant pnnx_19068 0 1 26029 value=3 prim::Constant pnnx_19069 0 1 26030 value=2 prim::ListConstruct pnnx_19070 6 1 26027 26028 26029 26030 17704 17705 17743 Tensor.view Tensor.view_2026 2 1 x.191 17741 x0.119 $input=x.191 $shape=17741 #x.191=(1,48,48,192)f32 #x0.119=(1,6,8,6,8,192)f32 prim::Constant pnnx_19074 0 1 26032 value=8 prim::Constant pnnx_19075 0 1 26033 value=8 prim::ListConstruct pnnx_19076 4 1 17706 26032 26033 17735 
17746 torch.permute torch.permute_2904 2 1 x0.119 17743 17744 $input=x0.119 $dims=17743 #x0.119=(1,6,8,6,8,192)f32 #17744=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_284 1 1 17744 17745 memory_format=torch.contiguous_format $input=17744 #17744=(1,6,6,8,8,192)f32 #17745=(1,6,6,8,8,192)f32 prim::Constant pnnx_19078 0 1 26034 value=-1 prim::ListConstruct pnnx_19079 3 1 26034 17707 17721 17748 prim::Constant pnnx_19081 0 1 17750 value=1.767767e-01 prim::Constant pnnx_19082 0 1 17751 value=trunc prim::Constant pnnx_19083 0 1 17752 value=6 prim::Constant pnnx_19084 0 1 17753 value=0 prim::Constant pnnx_19085 0 1 17754 value=1 prim::Constant pnnx_19086 0 1 17755 value=2 prim::Constant pnnx_19087 0 1 17756 value=3 prim::Constant pnnx_19088 0 1 17757 value=6 prim::Constant pnnx_19089 0 1 17758 value=4 prim::Constant pnnx_19090 0 1 17759 value=-2 prim::Constant pnnx_19091 0 1 17760 value=-1 prim::Constant pnnx_19092 0 1 17761 value=64 pnnx.Attribute layers_mmsa.3.residual_group.blocks.4.attn 0 1 relative_position_bias_table.191 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.191=(225,6)f32 pnnx.Attribute layers_mmsa.3.residual_group.blocks.4.attn 0 1 relative_position_index.191 @relative_position_index=(64,64)i64 #relative_position_index.191=(64,64)i64 Tensor.view Tensor.view_2027 2 1 17745 17746 x_windows.191 $input=17745 $shape=17746 #17745=(1,6,6,8,8,192)f32 #x_windows.191=(36,8,8,192)f32 Tensor.view Tensor.view_2028 2 1 x_windows.191 17748 x1.119 $input=x_windows.191 $shape=17748 #x_windows.191=(36,8,8,192)f32 #x1.119=(36,64,192)f32 aten::size pnnx_19093 2 1 x1.119 17753 17769 #x1.119=(36,64,192)f32 prim::NumToTensor pnnx_19094 1 1 17769 B_.191 aten::Int pnnx_19095 1 1 B_.191 17771 aten::Int pnnx_19096 1 1 B_.191 17772 aten::size pnnx_19097 2 1 x1.119 17754 17773 #x1.119=(36,64,192)f32 prim::NumToTensor pnnx_19098 1 1 17773 N.191 aten::Int pnnx_19099 1 1 N.191 17775 aten::Int pnnx_19100 1 1 N.191 17776 aten::size pnnx_19101 2 1 x1.119 17755 17777 #x1.119=(36,64,192)f32 prim::NumToTensor pnnx_19102 1 1 17777 C.389 aten::Int pnnx_19103 1 1 C.389 17779 nn.Linear layers_mmsa.3.residual_group.blocks.4.attn.qkv 1 1 x1.119 17780 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.119=(36,64,192)f32 #17780=(36,64,576)f32 aten::div pnnx_19104 3 1 C.389 17752 17751 17781 aten::Int pnnx_19105 1 1 17781 17782 prim::ListConstruct pnnx_19106 5 1 17772 17776 17756 17757 17782 17783 prim::Constant pnnx_19108 0 1 26035 value=2 prim::Constant pnnx_19109 0 1 26036 value=0 prim::Constant pnnx_19110 0 1 26037 value=3 prim::Constant pnnx_19111 0 1 26038 value=1 prim::ListConstruct pnnx_19112 5 1 26035 26036 26037 26038 17758 17785 Tensor.reshape Tensor.reshape_622 2 1 17780 17783 17784 $input=17780 $shape=17783 #17780=(36,64,576)f32 #17784=(36,64,3,6,32)f32 prim::Constant pnnx_19114 0 1 26039 value=0 prim::Constant pnnx_19115 0 1 26040 value=0 prim::Constant pnnx_19117 0 1 26041 value=0 prim::Constant pnnx_19118 0 1 26042 value=1 prim::Constant pnnx_19120 0 1 26043 value=0 prim::Constant pnnx_19121 0 1 26044 value=2 torch.permute torch.permute_2905 2 1 17784 17785 qkv0.119 $input=17784 $dims=17785 #17784=(36,64,3,6,32)f32 #qkv0.119=(3,36,6,64,32)f32 Tensor.select Tensor.select_932 3 1 qkv0.119 26039 26040 q.191 $input=qkv0.119 $dim=26039 $index=26040 #qkv0.119=(3,36,6,64,32)f32 #q.191=(36,6,64,32)f32 aten::mul pnnx_19123 2 1 q.191 17750 q0.119 #q.191=(36,6,64,32)f32 #q0.119=(36,6,64,32)f32 Tensor.select Tensor.select_933 3 1 qkv0.119 26041 26042 k.191 
$input=qkv0.119 $dim=26041 $index=26042 #qkv0.119=(3,36,6,64,32)f32 #k.191=(36,6,64,32)f32 prim::Constant pnnx_19126 0 1 26045 value=-1 prim::ListConstruct pnnx_19127 1 1 26045 17793 Tensor.view Tensor.view_2029 2 1 relative_position_index.191 17793 17794 $input=relative_position_index.191 $shape=17793 #relative_position_index.191=(64,64)i64 #17794=(4096)i64 prim::ListConstruct pnnx_19129 1 1 17794 17795 #17794=(4096)i64 prim::Constant pnnx_19131 0 1 26046 value=64 prim::Constant pnnx_19132 0 1 26047 value=-1 prim::ListConstruct pnnx_19133 3 1 17761 26046 26047 17797 Tensor.index Tensor.index_420 2 1 relative_position_bias_table.191 17795 17796 $input=relative_position_bias_table.191 $expr=17795 #relative_position_bias_table.191=(225,6)f32 #17796=(4096,6)f32 prim::Constant pnnx_19135 0 1 26048 value=2 prim::Constant pnnx_19136 0 1 26049 value=0 prim::Constant pnnx_19137 0 1 26050 value=1 prim::ListConstruct pnnx_19138 3 1 26048 26049 26050 17799 Tensor.view Tensor.view_2030 2 1 17796 17797 relative_position_bias.191 $input=17796 $shape=17797 #17796=(4096,6)f32 #relative_position_bias.191=(64,64,6)f32 prim::Constant pnnx_19142 0 1 26052 value=0 torch.permute torch.permute_2906 2 1 relative_position_bias.191 17799 17800 $input=relative_position_bias.191 $dims=17799 #relative_position_bias.191=(64,64,6)f32 #17800=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_285 1 1 17800 relative_position_bias0.119 memory_format=torch.contiguous_format $input=17800 #17800=(6,64,64)f32 #relative_position_bias0.119=(6,64,64)f32 prim::Constant pnnx_19144 0 1 26053 value=1 torch.transpose torch.transpose_3183 3 1 k.191 17759 17760 17791 $input=k.191 $dim0=17759 $dim1=17760 #k.191=(36,6,64,32)f32 #17791=(36,6,32,64)f32 torch.matmul torch.matmul_2392 2 1 q0.119 17791 attn.383 $input=q0.119 $other=17791 #q0.119=(36,6,64,32)f32 #17791=(36,6,32,64)f32 #attn.383=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3406 2 1 relative_position_bias0.119 26052 17802 $input=relative_position_bias0.119 $dim=26052 #relative_position_bias0.119=(6,64,64)f32 #17802=(1,6,64,64)f32 aten::add pnnx_19145 3 1 attn.383 17802 26053 input.427 #attn.383=(36,6,64,64)f32 #17802=(1,6,64,64)f32 #input.427=(36,6,64,64)f32 nn.Softmax layers_mmsa.3.residual_group.blocks.4.attn.softmax 1 1 input.427 17804 dim=-1 #input.427=(36,6,64,64)f32 #17804=(36,6,64,64)f32 nn.Dropout layers_mmsa.3.residual_group.blocks.4.attn.attn_drop 1 1 17804 17805 #17804=(36,6,64,64)f32 #17805=(36,6,64,64)f32 Tensor.select Tensor.select_934 3 1 qkv0.119 26043 26044 v.191 $input=qkv0.119 $dim=26043 $index=26044 #qkv0.119=(3,36,6,64,32)f32 #v.191=(36,6,64,32)f32 prim::Constant pnnx_19147 0 1 26054 value=1 prim::Constant pnnx_19148 0 1 26055 value=2 torch.matmul torch.matmul_2393 2 1 17805 v.191 17806 $input=17805 $other=v.191 #17805=(36,6,64,64)f32 #v.191=(36,6,64,32)f32 #17806=(36,6,64,32)f32 prim::ListConstruct pnnx_19150 3 1 17771 17775 17779 17808 torch.transpose torch.transpose_3184 3 1 17806 26054 26055 17807 $input=17806 $dim0=26054 $dim1=26055 #17806=(36,6,64,32)f32 #17807=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_623 2 1 17807 17808 input0.123 $input=17807 $shape=17808 #17807=(36,64,6,32)f32 #input0.123=(36,64,192)f32 nn.Linear layers_mmsa.3.residual_group.blocks.4.attn.proj 1 1 input0.123 17810 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.123=(36,64,192)f32 #17810=(36,64,192)f32 nn.Dropout layers_mmsa.3.residual_group.blocks.4.attn.proj_drop 1 1 17810 17811 #17810=(36,64,192)f32 #17811=(36,64,192)f32 prim::Constant 
pnnx_19152 0 1 26056 value=-1 prim::Constant pnnx_19153 0 1 26057 value=8 prim::Constant pnnx_19154 0 1 26058 value=8 prim::ListConstruct pnnx_19155 4 1 26056 26057 26058 17720 17812 prim::Constant pnnx_19157 0 1 26059 value=8 prim::Constant pnnx_19158 0 1 26060 value=trunc aten::div pnnx_19159 3 1 H1.1 26059 26060 17814 aten::Int pnnx_19160 1 1 17814 17815 prim::Constant pnnx_19161 0 1 26061 value=8 prim::Constant pnnx_19162 0 1 26062 value=trunc aten::div pnnx_19163 3 1 W1.1 26061 26062 17816 aten::Int pnnx_19164 1 1 17816 17817 prim::Constant pnnx_19165 0 1 26063 value=1 prim::Constant pnnx_19166 0 1 26064 value=8 prim::Constant pnnx_19167 0 1 26065 value=8 prim::Constant pnnx_19168 0 1 26066 value=-1 prim::ListConstruct pnnx_19169 6 1 26063 17815 17817 26064 26065 26066 17818 prim::Constant pnnx_19171 0 1 26067 value=0 prim::Constant pnnx_19172 0 1 26068 value=1 prim::Constant pnnx_19173 0 1 26069 value=3 prim::Constant pnnx_19174 0 1 26070 value=2 prim::Constant pnnx_19175 0 1 26071 value=4 prim::Constant pnnx_19176 0 1 26072 value=5 prim::ListConstruct pnnx_19177 6 1 26067 26068 26069 26070 26071 26072 17820 Tensor.view Tensor.view_2031 2 1 17811 17812 windows.191 $input=17811 $shape=17812 #17811=(36,64,192)f32 #windows.191=(36,8,8,192)f32 Tensor.view Tensor.view_2032 2 1 windows.191 17818 x2.119 $input=windows.191 $shape=17818 #windows.191=(36,8,8,192)f32 #x2.119=(1,6,6,8,8,192)f32 prim::Constant pnnx_19181 0 1 26074 value=1 prim::Constant pnnx_19182 0 1 26075 value=-1 prim::ListConstruct pnnx_19183 4 1 26074 1627 1867 26075 17823 torch.permute torch.permute_2907 2 1 x2.119 17820 17821 $input=x2.119 $dims=17820 #x2.119=(1,6,6,8,8,192)f32 #17821=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_286 1 1 17821 17822 memory_format=torch.contiguous_format $input=17821 #17821=(1,6,8,6,8,192)f32 #17822=(1,6,8,6,8,192)f32 aten::mul pnnx_19185 2 1 H1.1 W1.1 17825 aten::Int pnnx_19186 1 1 17825 17826 prim::ListConstruct pnnx_19187 3 1 17715 17826 17719 17827 prim::Constant pnnx_19189 0 1 17829 value=None prim::Constant pnnx_19190 0 1 26076 value=1 Tensor.view Tensor.view_2033 2 1 17822 17823 x3.119 $input=17822 $shape=17823 #17822=(1,6,8,6,8,192)f32 #x3.119=(1,48,48,192)f32 Tensor.view Tensor.view_2034 2 1 x3.119 17827 x4.119 $input=x3.119 $shape=17827 #x3.119=(1,48,48,192)f32 #x4.119=(1,2304,192)f32 aten::add pnnx_19191 3 1 17696 x4.119 26076 input.429 #17696=(1,2304,192)f32 #x4.119=(1,2304,192)f32 #input.429=(1,2304,192)f32 nn.LayerNorm layers_mmsa.3.residual_group.blocks.4.norm2 1 1 input.429 17831 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.429=(1,2304,192)f32 #17831=(1,2304,192)f32 nn.Linear layers_mmsa.3.residual_group.blocks.4.mlp.fc1 1 1 17831 17836 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #17831=(1,2304,192)f32 #17836=(1,2304,384)f32 nn.GELU layers_mmsa.3.residual_group.blocks.4.mlp.act 1 1 17836 17837 #17836=(1,2304,384)f32 #17837=(1,2304,384)f32 nn.Dropout layers_mmsa.3.residual_group.blocks.4.mlp.drop 1 1 17837 17838 #17837=(1,2304,384)f32 #17838=(1,2304,384)f32 nn.Linear layers_mmsa.3.residual_group.blocks.4.mlp.fc2 1 1 17838 17839 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #17838=(1,2304,384)f32 #17839=(1,2304,192)f32 nn.Dropout layers_mmsa.3.residual_group.blocks.4.mlp.drop 1 1 17839 17840 #17839=(1,2304,192)f32 #17840=(1,2304,192)f32 prim::Constant pnnx_19192 0 1 17841 value=None prim::Constant pnnx_19193 0 1 26077 value=1 aten::add 
pnnx_19194 3 1 input.429 17840 26077 17842 #input.429=(1,2304,192)f32 #17840=(1,2304,192)f32 #17842=(1,2304,192)f32 prim::Constant pnnx_19195 0 1 17843 value=trunc prim::Constant pnnx_19196 0 1 17844 value=8 prim::Constant pnnx_19197 0 1 17845 value=0 prim::Constant pnnx_19198 0 1 17846 value=2 prim::Constant pnnx_19199 0 1 17847 value=-4 prim::Constant pnnx_19200 0 1 17848 value=1 prim::Constant pnnx_19201 0 1 17849 value=3 prim::Constant pnnx_19202 0 1 17850 value=8 prim::Constant pnnx_19203 0 1 17851 value=4 prim::Constant pnnx_19204 0 1 17852 value=5 prim::Constant pnnx_19205 0 1 17853 value=-1 prim::Constant pnnx_19206 0 1 17854 value=64 pnnx.Attribute layers_mmsa.3.residual_group.blocks.5 0 1 attn_mask.97 @attn_mask=(36,64,64)f32 #attn_mask.97=(36,64,64)f32 aten::size pnnx_19207 2 1 17842 17845 17861 #17842=(1,2304,192)f32 prim::NumToTensor pnnx_19208 1 1 17861 B.229 aten::Int pnnx_19209 1 1 B.229 17863 aten::Int pnnx_19210 1 1 B.229 17864 aten::size pnnx_19211 2 1 17842 17846 17865 #17842=(1,2304,192)f32 prim::NumToTensor pnnx_19212 1 1 17865 C.391 aten::Int pnnx_19213 1 1 C.391 17867 aten::Int pnnx_19214 1 1 C.391 17868 aten::Int pnnx_19215 1 1 C.391 17869 aten::Int pnnx_19216 1 1 C.391 17870 nn.LayerNorm layers_mmsa.3.residual_group.blocks.5.norm1 1 1 17842 17871 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #17842=(1,2304,192)f32 #17871=(1,2304,192)f32 prim::ListConstruct pnnx_19217 4 1 17864 1624 1864 17870 17872 prim::Constant pnnx_19219 0 1 26078 value=-4 prim::ListConstruct pnnx_19220 2 1 17847 26078 17874 prim::Constant pnnx_19221 0 1 26079 value=2 prim::ListConstruct pnnx_19222 2 1 17848 26079 17875 Tensor.view Tensor.view_2035 2 1 17871 17872 x.193 $input=17871 $shape=17872 #17871=(1,2304,192)f32 #x.193=(1,48,48,192)f32 prim::Constant pnnx_19224 0 1 26080 value=0 torch.roll torch.roll_2514 3 1 x.193 17874 17875 x0.121 $input=x.193 $shifts=17874 $dims=17875 #x.193=(1,48,48,192)f32 #x0.121=(1,48,48,192)f32 aten::size pnnx_19225 2 1 x0.121 26080 17877 #x0.121=(1,48,48,192)f32 prim::NumToTensor pnnx_19226 1 1 17877 B0.121 aten::Int pnnx_19227 1 1 B0.121 17879 prim::Constant pnnx_19228 0 1 26081 value=1 aten::size pnnx_19229 2 1 x0.121 26081 17880 #x0.121=(1,48,48,192)f32 prim::NumToTensor pnnx_19230 1 1 17880 17881 prim::Constant pnnx_19231 0 1 26082 value=2 aten::size pnnx_19232 2 1 x0.121 26082 17882 #x0.121=(1,48,48,192)f32 prim::NumToTensor pnnx_19233 1 1 17882 17883 aten::size pnnx_19234 2 1 x0.121 17849 17884 #x0.121=(1,48,48,192)f32 prim::NumToTensor pnnx_19235 1 1 17884 C0.121 aten::Int pnnx_19236 1 1 C0.121 17886 aten::Int pnnx_19237 1 1 C0.121 17887 aten::div pnnx_19238 3 1 17881 17844 17843 17888 aten::Int pnnx_19239 1 1 17888 17889 prim::Constant pnnx_19240 0 1 26083 value=8 prim::Constant pnnx_19241 0 1 26084 value=trunc aten::div pnnx_19242 3 1 17883 26083 26084 17890 aten::Int pnnx_19243 1 1 17890 17891 prim::Constant pnnx_19244 0 1 26085 value=8 prim::ListConstruct pnnx_19245 6 1 17879 17889 17850 17891 26085 17887 17892 prim::Constant pnnx_19247 0 1 26086 value=0 prim::Constant pnnx_19248 0 1 26087 value=1 prim::Constant pnnx_19249 0 1 26088 value=3 prim::Constant pnnx_19250 0 1 26089 value=2 prim::ListConstruct pnnx_19251 6 1 26086 26087 26088 26089 17851 17852 17894 Tensor.view Tensor.view_2036 2 1 x0.121 17892 x1.121 $input=x0.121 $shape=17892 #x0.121=(1,48,48,192)f32 #x1.121=(1,6,8,6,8,192)f32 prim::Constant pnnx_19255 0 1 26091 value=8 prim::Constant pnnx_19256 0 1 26092 value=8 prim::ListConstruct 
pnnx_19257 4 1 17853 26091 26092 17886 17897 torch.permute torch.permute_2908 2 1 x1.121 17894 17895 $input=x1.121 $dims=17894 #x1.121=(1,6,8,6,8,192)f32 #17895=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_287 1 1 17895 17896 memory_format=torch.contiguous_format $input=17895 #17895=(1,6,6,8,8,192)f32 #17896=(1,6,6,8,8,192)f32 prim::Constant pnnx_19259 0 1 26093 value=-1 prim::ListConstruct pnnx_19260 3 1 26093 17854 17869 17899 prim::Constant pnnx_19262 0 1 17901 value=1.767767e-01 prim::Constant pnnx_19263 0 1 17902 value=trunc prim::Constant pnnx_19264 0 1 17903 value=6 prim::Constant pnnx_19265 0 1 17904 value=0 prim::Constant pnnx_19266 0 1 17905 value=1 prim::Constant pnnx_19267 0 1 17906 value=2 prim::Constant pnnx_19268 0 1 17907 value=3 prim::Constant pnnx_19269 0 1 17908 value=6 prim::Constant pnnx_19270 0 1 17909 value=4 prim::Constant pnnx_19271 0 1 17910 value=-2 prim::Constant pnnx_19272 0 1 17911 value=-1 prim::Constant pnnx_19273 0 1 17912 value=64 pnnx.Attribute layers_mmsa.3.residual_group.blocks.5.attn 0 1 relative_position_bias_table.193 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.193=(225,6)f32 pnnx.Attribute layers_mmsa.3.residual_group.blocks.5.attn 0 1 relative_position_index.193 @relative_position_index=(64,64)i64 #relative_position_index.193=(64,64)i64 Tensor.view Tensor.view_2037 2 1 17896 17897 x_windows.193 $input=17896 $shape=17897 #17896=(1,6,6,8,8,192)f32 #x_windows.193=(36,8,8,192)f32 Tensor.view Tensor.view_2038 2 1 x_windows.193 17899 x2.121 $input=x_windows.193 $shape=17899 #x_windows.193=(36,8,8,192)f32 #x2.121=(36,64,192)f32 aten::size pnnx_19274 2 1 x2.121 17904 17920 #x2.121=(36,64,192)f32 prim::NumToTensor pnnx_19275 1 1 17920 B_.193 aten::Int pnnx_19276 1 1 B_.193 17922 aten::Int pnnx_19277 1 1 B_.193 17923 aten::size pnnx_19278 2 1 x2.121 17905 17924 #x2.121=(36,64,192)f32 prim::NumToTensor pnnx_19279 1 1 17924 N.193 aten::Int pnnx_19280 1 1 N.193 17926 aten::Int pnnx_19281 1 1 N.193 17927 aten::Int pnnx_19282 1 1 N.193 17928 aten::Int pnnx_19283 1 1 N.193 17929 aten::Int pnnx_19284 1 1 N.193 17930 aten::Int pnnx_19285 1 1 N.193 17931 aten::size pnnx_19286 2 1 x2.121 17906 17932 #x2.121=(36,64,192)f32 prim::NumToTensor pnnx_19287 1 1 17932 C.393 aten::Int pnnx_19288 1 1 C.393 17934 nn.Linear layers_mmsa.3.residual_group.blocks.5.attn.qkv 1 1 x2.121 17935 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.121=(36,64,192)f32 #17935=(36,64,576)f32 aten::div pnnx_19289 3 1 C.393 17903 17902 17936 aten::Int pnnx_19290 1 1 17936 17937 prim::ListConstruct pnnx_19291 5 1 17923 17931 17907 17908 17937 17938 prim::Constant pnnx_19293 0 1 26094 value=2 prim::Constant pnnx_19294 0 1 26095 value=0 prim::Constant pnnx_19295 0 1 26096 value=3 prim::Constant pnnx_19296 0 1 26097 value=1 prim::ListConstruct pnnx_19297 5 1 26094 26095 26096 26097 17909 17940 Tensor.reshape Tensor.reshape_624 2 1 17935 17938 17939 $input=17935 $shape=17938 #17935=(36,64,576)f32 #17939=(36,64,3,6,32)f32 prim::Constant pnnx_19299 0 1 26098 value=0 prim::Constant pnnx_19300 0 1 26099 value=0 prim::Constant pnnx_19302 0 1 26100 value=0 prim::Constant pnnx_19303 0 1 26101 value=1 prim::Constant pnnx_19305 0 1 26102 value=0 prim::Constant pnnx_19306 0 1 26103 value=2 torch.permute torch.permute_2909 2 1 17939 17940 qkv0.121 $input=17939 $dims=17940 #17939=(36,64,3,6,32)f32 #qkv0.121=(3,36,6,64,32)f32 Tensor.select Tensor.select_935 3 1 qkv0.121 26098 26099 q.193 $input=qkv0.121 $dim=26098 $index=26099 
#qkv0.121=(3,36,6,64,32)f32 #q.193=(36,6,64,32)f32 aten::mul pnnx_19308 2 1 q.193 17901 q0.121 #q.193=(36,6,64,32)f32 #q0.121=(36,6,64,32)f32 Tensor.select Tensor.select_936 3 1 qkv0.121 26100 26101 k.193 $input=qkv0.121 $dim=26100 $index=26101 #qkv0.121=(3,36,6,64,32)f32 #k.193=(36,6,64,32)f32 prim::Constant pnnx_19311 0 1 26104 value=-1 prim::ListConstruct pnnx_19312 1 1 26104 17948 Tensor.view Tensor.view_2039 2 1 relative_position_index.193 17948 17949 $input=relative_position_index.193 $shape=17948 #relative_position_index.193=(64,64)i64 #17949=(4096)i64 prim::ListConstruct pnnx_19314 1 1 17949 17950 #17949=(4096)i64 prim::Constant pnnx_19316 0 1 26105 value=64 prim::Constant pnnx_19317 0 1 26106 value=-1 prim::ListConstruct pnnx_19318 3 1 17912 26105 26106 17952 Tensor.index Tensor.index_421 2 1 relative_position_bias_table.193 17950 17951 $input=relative_position_bias_table.193 $expr=17950 #relative_position_bias_table.193=(225,6)f32 #17951=(4096,6)f32 prim::Constant pnnx_19320 0 1 26107 value=2 prim::Constant pnnx_19321 0 1 26108 value=0 prim::Constant pnnx_19322 0 1 26109 value=1 prim::ListConstruct pnnx_19323 3 1 26107 26108 26109 17954 Tensor.view Tensor.view_2040 2 1 17951 17952 relative_position_bias.193 $input=17951 $shape=17952 #17951=(4096,6)f32 #relative_position_bias.193=(64,64,6)f32 prim::Constant pnnx_19327 0 1 26111 value=0 torch.permute torch.permute_2910 2 1 relative_position_bias.193 17954 17955 $input=relative_position_bias.193 $dims=17954 #relative_position_bias.193=(64,64,6)f32 #17955=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_288 1 1 17955 relative_position_bias0.121 memory_format=torch.contiguous_format $input=17955 #17955=(6,64,64)f32 #relative_position_bias0.121=(6,64,64)f32 prim::Constant pnnx_19329 0 1 26112 value=1 torch.transpose torch.transpose_3185 3 1 k.193 17910 17911 17946 $input=k.193 $dim0=17910 $dim1=17911 #k.193=(36,6,64,32)f32 #17946=(36,6,32,64)f32 torch.matmul torch.matmul_2394 2 1 q0.121 17946 attn.387 $input=q0.121 $other=17946 #q0.121=(36,6,64,32)f32 #17946=(36,6,32,64)f32 #attn.387=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3407 2 1 relative_position_bias0.121 26111 17957 $input=relative_position_bias0.121 $dim=26111 #relative_position_bias0.121=(6,64,64)f32 #17957=(1,6,64,64)f32 aten::add pnnx_19330 3 1 attn.387 17957 26112 attn0.61 #attn.387=(36,6,64,64)f32 #17957=(1,6,64,64)f32 #attn0.61=(36,6,64,64)f32 prim::Constant pnnx_19331 0 1 26113 value=0 aten::size pnnx_19332 2 1 attn_mask.97 26113 17959 #attn_mask.97=(36,64,64)f32 prim::NumToTensor pnnx_19333 1 1 17959 other.97 aten::Int pnnx_19334 1 1 other.97 17961 prim::Constant pnnx_19335 0 1 26114 value=trunc aten::div pnnx_19336 3 1 B_.193 other.97 26114 17962 aten::Int pnnx_19337 1 1 17962 17963 prim::Constant pnnx_19338 0 1 26115 value=6 prim::ListConstruct pnnx_19339 5 1 17963 17961 26115 17930 17929 17964 prim::Constant pnnx_19341 0 1 26116 value=1 prim::Constant pnnx_19343 0 1 26117 value=0 prim::Constant pnnx_19345 0 1 26118 value=1 Tensor.view Tensor.view_2041 2 1 attn0.61 17964 17965 $input=attn0.61 $shape=17964 #attn0.61=(36,6,64,64)f32 #17965=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3408 2 1 attn_mask.97 26116 17966 $input=attn_mask.97 $dim=26116 #attn_mask.97=(36,64,64)f32 #17966=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3409 2 1 17966 26117 17967 $input=17966 $dim=26117 #17966=(36,1,64,64)f32 #17967=(1,36,1,64,64)f32 aten::add pnnx_19346 3 1 17965 17967 26118 attn1.61 #17965=(1,36,6,64,64)f32 #17967=(1,36,1,64,64)f32 #attn1.61=(1,36,6,64,64)f32 
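attn_mask.97=(36,64,64), like attn_mask.93 and .95 before it, is stored in the export as a constant attribute; only its use is traced: the logits are viewed as (1,36,6,64,64), the mask is unsqueezed to (1,36,1,64,64) and added, and the result is flattened back to (36,6,64,64). The usual way such a mask is built for shifted 8x8 windows with shift 4 is sketched below; this reconstruction is an assumption, since the graph only stores the finished tensor:

    import torch

    def build_shift_mask(H=48, W=48, window_size=8, shift=4):
        # label each pixel by which shifted region it falls in
        img = torch.zeros(1, H, W, 1)
        cnt = 0
        for h in (slice(0, -window_size), slice(-window_size, -shift), slice(-shift, None)):
            for w in (slice(0, -window_size), slice(-window_size, -shift), slice(-shift, None)):
                img[:, h, w, :] = cnt
                cnt += 1
        # partition into (H/ws * W/ws) windows of ws*ws labels
        win = img.view(1, H // window_size, window_size, W // window_size, window_size, 1)
        win = win.permute(0, 1, 3, 2, 4, 5).reshape(-1, window_size * window_size)
        mask = win.unsqueeze(1) - win.unsqueeze(2)   # (36, 64, 64)
        return mask.masked_fill(mask != 0, -100.0).masked_fill(mask == 0, 0.0)

Tokens from different regions receive -100 before the softmax, so attention never crosses the wrap-around seams introduced by the cyclic shift.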
prim::Constant pnnx_19347 0 1 26119 value=-1 prim::Constant pnnx_19348 0 1 26120 value=6 prim::ListConstruct pnnx_19349 4 1 26119 26120 17928 17927 17969 Tensor.view Tensor.view_2042 2 1 attn1.61 17969 input.431 $input=attn1.61 $shape=17969 #attn1.61=(1,36,6,64,64)f32 #input.431=(36,6,64,64)f32 nn.Softmax layers_mmsa.3.residual_group.blocks.5.attn.softmax 1 1 input.431 17971 dim=-1 #input.431=(36,6,64,64)f32 #17971=(36,6,64,64)f32 nn.Dropout layers_mmsa.3.residual_group.blocks.5.attn.attn_drop 1 1 17971 17972 #17971=(36,6,64,64)f32 #17972=(36,6,64,64)f32 Tensor.select Tensor.select_937 3 1 qkv0.121 26102 26103 v.193 $input=qkv0.121 $dim=26102 $index=26103 #qkv0.121=(3,36,6,64,32)f32 #v.193=(36,6,64,32)f32 prim::Constant pnnx_19352 0 1 26121 value=1 prim::Constant pnnx_19353 0 1 26122 value=2 torch.matmul torch.matmul_2395 2 1 17972 v.193 17973 $input=17972 $other=v.193 #17972=(36,6,64,64)f32 #v.193=(36,6,64,32)f32 #17973=(36,6,64,32)f32 prim::ListConstruct pnnx_19355 3 1 17922 17926 17934 17975 torch.transpose torch.transpose_3186 3 1 17973 26121 26122 17974 $input=17973 $dim0=26121 $dim1=26122 #17973=(36,6,64,32)f32 #17974=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_625 2 1 17974 17975 input0.125 $input=17974 $shape=17975 #17974=(36,64,6,32)f32 #input0.125=(36,64,192)f32 nn.Linear layers_mmsa.3.residual_group.blocks.5.attn.proj 1 1 input0.125 17977 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.125=(36,64,192)f32 #17977=(36,64,192)f32 nn.Dropout layers_mmsa.3.residual_group.blocks.5.attn.proj_drop 1 1 17977 17978 #17977=(36,64,192)f32 #17978=(36,64,192)f32 prim::Constant pnnx_19357 0 1 26123 value=-1 prim::Constant pnnx_19358 0 1 26124 value=8 prim::Constant pnnx_19359 0 1 26125 value=8 prim::ListConstruct pnnx_19360 4 1 26123 26124 26125 17868 17979 prim::Constant pnnx_19362 0 1 26126 value=8 prim::Constant pnnx_19363 0 1 26127 value=trunc aten::div pnnx_19364 3 1 H1.1 26126 26127 17981 aten::Int pnnx_19365 1 1 17981 17982 prim::Constant pnnx_19366 0 1 26128 value=8 prim::Constant pnnx_19367 0 1 26129 value=trunc aten::div pnnx_19368 3 1 W1.1 26128 26129 17983 aten::Int pnnx_19369 1 1 17983 17984 prim::Constant pnnx_19370 0 1 26130 value=1 prim::Constant pnnx_19371 0 1 26131 value=8 prim::Constant pnnx_19372 0 1 26132 value=8 prim::Constant pnnx_19373 0 1 26133 value=-1 prim::ListConstruct pnnx_19374 6 1 26130 17982 17984 26131 26132 26133 17985 prim::Constant pnnx_19376 0 1 26134 value=0 prim::Constant pnnx_19377 0 1 26135 value=1 prim::Constant pnnx_19378 0 1 26136 value=3 prim::Constant pnnx_19379 0 1 26137 value=2 prim::Constant pnnx_19380 0 1 26138 value=4 prim::Constant pnnx_19381 0 1 26139 value=5 prim::ListConstruct pnnx_19382 6 1 26134 26135 26136 26137 26138 26139 17987 Tensor.view Tensor.view_2043 2 1 17978 17979 windows.193 $input=17978 $shape=17979 #17978=(36,64,192)f32 #windows.193=(36,8,8,192)f32 Tensor.view Tensor.view_2044 2 1 windows.193 17985 x3.121 $input=windows.193 $shape=17985 #windows.193=(36,8,8,192)f32 #x3.121=(1,6,6,8,8,192)f32 prim::Constant pnnx_19386 0 1 26141 value=1 prim::Constant pnnx_19387 0 1 26142 value=-1 prim::ListConstruct pnnx_19388 4 1 26141 1621 1861 26142 17990 torch.permute torch.permute_2911 2 1 x3.121 17987 17988 $input=x3.121 $dims=17987 #x3.121=(1,6,6,8,8,192)f32 #17988=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_289 1 1 17988 17989 memory_format=torch.contiguous_format $input=17988 #17988=(1,6,8,6,8,192)f32 #17989=(1,6,8,6,8,192)f32 prim::Constant pnnx_19390 0 1 26143 value=4 
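The chain Tensor.view_2043 / Tensor.view_2044 / torch.permute_2911 / Tensor.contiguous_289 above, finished by Tensor.view_2045 just below, matches the standard window-reverse step: per-window tokens of shape (36,8,8,192) are stitched back into the (1,48,48,192) feature map. A sketch under that assumption; window_reverse and its arguments are illustrative names, not taken from this file:

import torch

def window_reverse(windows, window_size, H, W):
    # windows: (num_windows * B, window_size, window_size, C), e.g. (36, 8, 8, 192) here
    B = windows.shape[0] // ((H // window_size) * (W // window_size))
    x = windows.view(B, H // window_size, W // window_size,
                     window_size, window_size, -1)       # Tensor.view_2044
    x = x.permute(0, 1, 3, 2, 4, 5).contiguous()         # torch.permute_2911 + contiguous
    return x.view(B, H, W, -1)                           # Tensor.view_2045

print(window_reverse(torch.randn(36, 8, 8, 192), window_size=8, H=48, W=48).shape)  # (1, 48, 48, 192)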
prim::Constant pnnx_19391 0 1 26144 value=4 prim::ListConstruct pnnx_19392 2 1 26143 26144 17992 prim::Constant pnnx_19393 0 1 26145 value=1 prim::Constant pnnx_19394 0 1 26146 value=2 prim::ListConstruct pnnx_19395 2 1 26145 26146 17993 Tensor.view Tensor.view_2045 2 1 17989 17990 shifted_x.97 $input=17989 $shape=17990 #17989=(1,6,8,6,8,192)f32 #shifted_x.97=(1,48,48,192)f32 aten::mul pnnx_19397 2 1 H1.1 W1.1 17995 aten::Int pnnx_19398 1 1 17995 17996 prim::ListConstruct pnnx_19399 3 1 17863 17996 17867 17997 prim::Constant pnnx_19401 0 1 17999 value=None prim::Constant pnnx_19402 0 1 26147 value=1 torch.roll torch.roll_2515 3 1 shifted_x.97 17992 17993 x4.121 $input=shifted_x.97 $shifts=17992 $dims=17993 #shifted_x.97=(1,48,48,192)f32 #x4.121=(1,48,48,192)f32 Tensor.view Tensor.view_2046 2 1 x4.121 17997 x5.97 $input=x4.121 $shape=17997 #x4.121=(1,48,48,192)f32 #x5.97=(1,2304,192)f32 aten::add pnnx_19403 3 1 17842 x5.97 26147 input.433 #17842=(1,2304,192)f32 #x5.97=(1,2304,192)f32 #input.433=(1,2304,192)f32 nn.LayerNorm layers_mmsa.3.residual_group.blocks.5.norm2 1 1 input.433 18001 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.433=(1,2304,192)f32 #18001=(1,2304,192)f32 nn.Linear layers_mmsa.3.residual_group.blocks.5.mlp.fc1 1 1 18001 18006 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #18001=(1,2304,192)f32 #18006=(1,2304,384)f32 nn.GELU layers_mmsa.3.residual_group.blocks.5.mlp.act 1 1 18006 18007 #18006=(1,2304,384)f32 #18007=(1,2304,384)f32 nn.Dropout layers_mmsa.3.residual_group.blocks.5.mlp.drop 1 1 18007 18008 #18007=(1,2304,384)f32 #18008=(1,2304,384)f32 nn.Linear layers_mmsa.3.residual_group.blocks.5.mlp.fc2 1 1 18008 18009 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #18008=(1,2304,384)f32 #18009=(1,2304,192)f32 nn.Dropout layers_mmsa.3.residual_group.blocks.5.mlp.drop 1 1 18009 18010 #18009=(1,2304,192)f32 #18010=(1,2304,192)f32 prim::Constant pnnx_19404 0 1 18011 value=None prim::Constant pnnx_19405 0 1 26148 value=1 aten::add pnnx_19406 3 1 input.433 18010 26148 18012 #input.433=(1,2304,192)f32 #18010=(1,2304,192)f32 #18012=(1,2304,192)f32 prim::Constant pnnx_19407 0 1 18013 value=0 prim::Constant pnnx_19408 0 1 18014 value=1 prim::Constant pnnx_19409 0 1 18015 value=2 prim::Constant pnnx_19410 0 1 18016 value=192 aten::size pnnx_19411 2 1 18012 18013 18017 #18012=(1,2304,192)f32 prim::NumToTensor pnnx_19412 1 1 18017 B.231 aten::Int pnnx_19413 1 1 B.231 18019 prim::ListConstruct pnnx_19415 4 1 18019 18016 1618 1858 18021 torch.transpose torch.transpose_3187 3 1 18012 18014 18015 18020 $input=18012 $dim0=18014 $dim1=18015 #18012=(1,2304,192)f32 #18020=(1,192,2304)f32 Tensor.view Tensor.view_2047 2 1 18020 18021 input.435 $input=18020 $shape=18021 #18020=(1,192,2304)f32 #input.435=(1,192,48,48)f32 nn.Conv2d layers_mmsa.3.conv 1 1 input.435 18023 bias=True dilation=(1,1) groups=1 in_channels=192 kernel_size=(3,3) out_channels=192 padding=(1,1) padding_mode=zeros stride=(1,1) @bias=(192)f32 @weight=(192,192,3,3)f32 #input.435=(1,192,48,48)f32 #18023=(1,192,48,48)f32 prim::Constant pnnx_19417 0 1 18024 value=-1 prim::Constant pnnx_19418 0 1 18025 value=2 prim::Constant pnnx_19419 0 1 18026 value=1 prim::Constant pnnx_19421 0 1 26149 value=2 torch.flatten torch.flatten_2201 3 1 18023 18025 18024 18027 $input=18023 $start_dim=18025 $end_dim=18024 #18023=(1,192,48,48)f32 #18027=(1,192,2304)f32 torch.transpose torch.transpose_3188 3 1 18027 18026 26149 18028 
$input=18027 $dim0=18026 $dim1=26149 #18027=(1,192,2304)f32 #18028=(1,2304,192)f32 aten::add pnnx_19423 3 1 18028 17047 17048 18029 #18028=(1,2304,192)f32 #17047=(1,2304,192)f32 #18029=(1,2304,192)f32 prim::Constant pnnx_19424 0 1 18030 value=1 prim::Constant pnnx_19425 0 1 18047 value=trunc prim::Constant pnnx_19426 0 1 18048 value=8 prim::Constant pnnx_19427 0 1 18049 value=0 prim::Constant pnnx_19428 0 1 18050 value=2 prim::Constant pnnx_19429 0 1 18051 value=1 prim::Constant pnnx_19430 0 1 18052 value=3 prim::Constant pnnx_19431 0 1 18053 value=8 prim::Constant pnnx_19432 0 1 18054 value=4 prim::Constant pnnx_19433 0 1 18055 value=5 prim::Constant pnnx_19434 0 1 18056 value=-1 prim::Constant pnnx_19435 0 1 18057 value=64 aten::size pnnx_19436 2 1 18029 18049 18063 #18029=(1,2304,192)f32 prim::NumToTensor pnnx_19437 1 1 18063 B.233 aten::Int pnnx_19438 1 1 B.233 18065 aten::Int pnnx_19439 1 1 B.233 18066 aten::size pnnx_19440 2 1 18029 18050 18067 #18029=(1,2304,192)f32 prim::NumToTensor pnnx_19441 1 1 18067 C.395 aten::Int pnnx_19442 1 1 C.395 18069 aten::Int pnnx_19443 1 1 C.395 18070 aten::Int pnnx_19444 1 1 C.395 18071 aten::Int pnnx_19445 1 1 C.395 18072 nn.LayerNorm layers_mmsa.4.residual_group.blocks.0.norm1 1 1 18029 18073 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #18029=(1,2304,192)f32 #18073=(1,2304,192)f32 prim::ListConstruct pnnx_19446 4 1 18066 1615 1855 18072 18074 prim::Constant pnnx_19448 0 1 26150 value=0 Tensor.view Tensor.view_2048 2 1 18073 18074 x.195 $input=18073 $shape=18074 #18073=(1,2304,192)f32 #x.195=(1,48,48,192)f32 aten::size pnnx_19449 2 1 x.195 26150 18076 #x.195=(1,48,48,192)f32 prim::NumToTensor pnnx_19450 1 1 18076 B0.123 aten::Int pnnx_19451 1 1 B0.123 18078 aten::size pnnx_19452 2 1 x.195 18051 18079 #x.195=(1,48,48,192)f32 prim::NumToTensor pnnx_19453 1 1 18079 18080 prim::Constant pnnx_19454 0 1 26151 value=2 aten::size pnnx_19455 2 1 x.195 26151 18081 #x.195=(1,48,48,192)f32 prim::NumToTensor pnnx_19456 1 1 18081 18082 aten::size pnnx_19457 2 1 x.195 18052 18083 #x.195=(1,48,48,192)f32 prim::NumToTensor pnnx_19458 1 1 18083 C0.123 aten::Int pnnx_19459 1 1 C0.123 18085 aten::Int pnnx_19460 1 1 C0.123 18086 aten::div pnnx_19461 3 1 18080 18048 18047 18087 aten::Int pnnx_19462 1 1 18087 18088 prim::Constant pnnx_19463 0 1 26152 value=8 prim::Constant pnnx_19464 0 1 26153 value=trunc aten::div pnnx_19465 3 1 18082 26152 26153 18089 aten::Int pnnx_19466 1 1 18089 18090 prim::Constant pnnx_19467 0 1 26154 value=8 prim::ListConstruct pnnx_19468 6 1 18078 18088 18053 18090 26154 18086 18091 prim::Constant pnnx_19470 0 1 26155 value=0 prim::Constant pnnx_19471 0 1 26156 value=1 prim::Constant pnnx_19472 0 1 26157 value=3 prim::Constant pnnx_19473 0 1 26158 value=2 prim::ListConstruct pnnx_19474 6 1 26155 26156 26157 26158 18054 18055 18093 Tensor.view Tensor.view_2049 2 1 x.195 18091 x0.123 $input=x.195 $shape=18091 #x.195=(1,48,48,192)f32 #x0.123=(1,6,8,6,8,192)f32 prim::Constant pnnx_19478 0 1 26160 value=8 prim::Constant pnnx_19479 0 1 26161 value=8 prim::ListConstruct pnnx_19480 4 1 18056 26160 26161 18085 18096 torch.permute torch.permute_2912 2 1 x0.123 18093 18094 $input=x0.123 $dims=18093 #x0.123=(1,6,8,6,8,192)f32 #18094=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_290 1 1 18094 18095 memory_format=torch.contiguous_format $input=18094 #18094=(1,6,6,8,8,192)f32 #18095=(1,6,6,8,8,192)f32 prim::Constant pnnx_19482 0 1 26162 value=-1 prim::ListConstruct pnnx_19483 3 1 26162 18057 18071 
18098 prim::Constant pnnx_19485 0 1 18100 value=1.767767e-01 prim::Constant pnnx_19486 0 1 18101 value=trunc prim::Constant pnnx_19487 0 1 18102 value=6 prim::Constant pnnx_19488 0 1 18103 value=0 prim::Constant pnnx_19489 0 1 18104 value=1 prim::Constant pnnx_19490 0 1 18105 value=2 prim::Constant pnnx_19491 0 1 18106 value=3 prim::Constant pnnx_19492 0 1 18107 value=6 prim::Constant pnnx_19493 0 1 18108 value=4 prim::Constant pnnx_19494 0 1 18109 value=-2 prim::Constant pnnx_19495 0 1 18110 value=-1 prim::Constant pnnx_19496 0 1 18111 value=64 pnnx.Attribute layers_mmsa.4.residual_group.blocks.0.attn 0 1 relative_position_bias_table.195 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.195=(225,6)f32 pnnx.Attribute layers_mmsa.4.residual_group.blocks.0.attn 0 1 relative_position_index.195 @relative_position_index=(64,64)i64 #relative_position_index.195=(64,64)i64 Tensor.view Tensor.view_2050 2 1 18095 18096 x_windows.195 $input=18095 $shape=18096 #18095=(1,6,6,8,8,192)f32 #x_windows.195=(36,8,8,192)f32 Tensor.view Tensor.view_2051 2 1 x_windows.195 18098 x1.123 $input=x_windows.195 $shape=18098 #x_windows.195=(36,8,8,192)f32 #x1.123=(36,64,192)f32 aten::size pnnx_19497 2 1 x1.123 18103 18119 #x1.123=(36,64,192)f32 prim::NumToTensor pnnx_19498 1 1 18119 B_.195 aten::Int pnnx_19499 1 1 B_.195 18121 aten::Int pnnx_19500 1 1 B_.195 18122 aten::size pnnx_19501 2 1 x1.123 18104 18123 #x1.123=(36,64,192)f32 prim::NumToTensor pnnx_19502 1 1 18123 N.195 aten::Int pnnx_19503 1 1 N.195 18125 aten::Int pnnx_19504 1 1 N.195 18126 aten::size pnnx_19505 2 1 x1.123 18105 18127 #x1.123=(36,64,192)f32 prim::NumToTensor pnnx_19506 1 1 18127 C.397 aten::Int pnnx_19507 1 1 C.397 18129 nn.Linear layers_mmsa.4.residual_group.blocks.0.attn.qkv 1 1 x1.123 18130 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.123=(36,64,192)f32 #18130=(36,64,576)f32 aten::div pnnx_19508 3 1 C.397 18102 18101 18131 aten::Int pnnx_19509 1 1 18131 18132 prim::ListConstruct pnnx_19510 5 1 18122 18126 18106 18107 18132 18133 prim::Constant pnnx_19512 0 1 26163 value=2 prim::Constant pnnx_19513 0 1 26164 value=0 prim::Constant pnnx_19514 0 1 26165 value=3 prim::Constant pnnx_19515 0 1 26166 value=1 prim::ListConstruct pnnx_19516 5 1 26163 26164 26165 26166 18108 18135 Tensor.reshape Tensor.reshape_626 2 1 18130 18133 18134 $input=18130 $shape=18133 #18130=(36,64,576)f32 #18134=(36,64,3,6,32)f32 prim::Constant pnnx_19518 0 1 26167 value=0 prim::Constant pnnx_19519 0 1 26168 value=0 prim::Constant pnnx_19521 0 1 26169 value=0 prim::Constant pnnx_19522 0 1 26170 value=1 prim::Constant pnnx_19524 0 1 26171 value=0 prim::Constant pnnx_19525 0 1 26172 value=2 torch.permute torch.permute_2913 2 1 18134 18135 qkv0.123 $input=18134 $dims=18135 #18134=(36,64,3,6,32)f32 #qkv0.123=(3,36,6,64,32)f32 Tensor.select Tensor.select_938 3 1 qkv0.123 26167 26168 q.195 $input=qkv0.123 $dim=26167 $index=26168 #qkv0.123=(3,36,6,64,32)f32 #q.195=(36,6,64,32)f32 aten::mul pnnx_19527 2 1 q.195 18100 q0.123 #q.195=(36,6,64,32)f32 #q0.123=(36,6,64,32)f32 Tensor.select Tensor.select_939 3 1 qkv0.123 26169 26170 k.195 $input=qkv0.123 $dim=26169 $index=26170 #qkv0.123=(3,36,6,64,32)f32 #k.195=(36,6,64,32)f32 prim::Constant pnnx_19530 0 1 26173 value=-1 prim::ListConstruct pnnx_19531 1 1 26173 18143 Tensor.view Tensor.view_2052 2 1 relative_position_index.195 18143 18144 $input=relative_position_index.195 $shape=18143 #relative_position_index.195=(64,64)i64 #18144=(4096)i64 prim::ListConstruct pnnx_19533 1 1 
18144 18145 #18144=(4096)i64 prim::Constant pnnx_19535 0 1 26174 value=64 prim::Constant pnnx_19536 0 1 26175 value=-1 prim::ListConstruct pnnx_19537 3 1 18111 26174 26175 18147 Tensor.index Tensor.index_422 2 1 relative_position_bias_table.195 18145 18146 $input=relative_position_bias_table.195 $expr=18145 #relative_position_bias_table.195=(225,6)f32 #18146=(4096,6)f32 prim::Constant pnnx_19539 0 1 26176 value=2 prim::Constant pnnx_19540 0 1 26177 value=0 prim::Constant pnnx_19541 0 1 26178 value=1 prim::ListConstruct pnnx_19542 3 1 26176 26177 26178 18149 Tensor.view Tensor.view_2053 2 1 18146 18147 relative_position_bias.195 $input=18146 $shape=18147 #18146=(4096,6)f32 #relative_position_bias.195=(64,64,6)f32 prim::Constant pnnx_19546 0 1 26180 value=0 torch.permute torch.permute_2914 2 1 relative_position_bias.195 18149 18150 $input=relative_position_bias.195 $dims=18149 #relative_position_bias.195=(64,64,6)f32 #18150=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_291 1 1 18150 relative_position_bias0.123 memory_format=torch.contiguous_format $input=18150 #18150=(6,64,64)f32 #relative_position_bias0.123=(6,64,64)f32 prim::Constant pnnx_19548 0 1 26181 value=1 torch.transpose torch.transpose_3189 3 1 k.195 18109 18110 18141 $input=k.195 $dim0=18109 $dim1=18110 #k.195=(36,6,64,32)f32 #18141=(36,6,32,64)f32 torch.matmul torch.matmul_2396 2 1 q0.123 18141 attn.391 $input=q0.123 $other=18141 #q0.123=(36,6,64,32)f32 #18141=(36,6,32,64)f32 #attn.391=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3410 2 1 relative_position_bias0.123 26180 18152 $input=relative_position_bias0.123 $dim=26180 #relative_position_bias0.123=(6,64,64)f32 #18152=(1,6,64,64)f32 aten::add pnnx_19549 3 1 attn.391 18152 26181 input.437 #attn.391=(36,6,64,64)f32 #18152=(1,6,64,64)f32 #input.437=(36,6,64,64)f32 nn.Softmax layers_mmsa.4.residual_group.blocks.0.attn.softmax 1 1 input.437 18154 dim=-1 #input.437=(36,6,64,64)f32 #18154=(36,6,64,64)f32 nn.Dropout layers_mmsa.4.residual_group.blocks.0.attn.attn_drop 1 1 18154 18155 #18154=(36,6,64,64)f32 #18155=(36,6,64,64)f32 Tensor.select Tensor.select_940 3 1 qkv0.123 26171 26172 v.195 $input=qkv0.123 $dim=26171 $index=26172 #qkv0.123=(3,36,6,64,32)f32 #v.195=(36,6,64,32)f32 prim::Constant pnnx_19551 0 1 26182 value=1 prim::Constant pnnx_19552 0 1 26183 value=2 torch.matmul torch.matmul_2397 2 1 18155 v.195 18156 $input=18155 $other=v.195 #18155=(36,6,64,64)f32 #v.195=(36,6,64,32)f32 #18156=(36,6,64,32)f32 prim::ListConstruct pnnx_19554 3 1 18121 18125 18129 18158 torch.transpose torch.transpose_3190 3 1 18156 26182 26183 18157 $input=18156 $dim0=26182 $dim1=26183 #18156=(36,6,64,32)f32 #18157=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_627 2 1 18157 18158 input0.127 $input=18157 $shape=18158 #18157=(36,64,6,32)f32 #input0.127=(36,64,192)f32 nn.Linear layers_mmsa.4.residual_group.blocks.0.attn.proj 1 1 input0.127 18160 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.127=(36,64,192)f32 #18160=(36,64,192)f32 nn.Dropout layers_mmsa.4.residual_group.blocks.0.attn.proj_drop 1 1 18160 18161 #18160=(36,64,192)f32 #18161=(36,64,192)f32 prim::Constant pnnx_19556 0 1 26184 value=-1 prim::Constant pnnx_19557 0 1 26185 value=8 prim::Constant pnnx_19558 0 1 26186 value=8 prim::ListConstruct pnnx_19559 4 1 26184 26185 26186 18070 18162 prim::Constant pnnx_19561 0 1 26187 value=8 prim::Constant pnnx_19562 0 1 26188 value=trunc aten::div pnnx_19563 3 1 H1.1 26187 26188 18164 aten::Int pnnx_19564 1 1 18164 18165 prim::Constant pnnx_19565 0 1 26189 
value=8 prim::Constant pnnx_19566 0 1 26190 value=trunc aten::div pnnx_19567 3 1 W1.1 26189 26190 18166 aten::Int pnnx_19568 1 1 18166 18167 prim::Constant pnnx_19569 0 1 26191 value=1 prim::Constant pnnx_19570 0 1 26192 value=8 prim::Constant pnnx_19571 0 1 26193 value=8 prim::Constant pnnx_19572 0 1 26194 value=-1 prim::ListConstruct pnnx_19573 6 1 26191 18165 18167 26192 26193 26194 18168 prim::Constant pnnx_19575 0 1 26195 value=0 prim::Constant pnnx_19576 0 1 26196 value=1 prim::Constant pnnx_19577 0 1 26197 value=3 prim::Constant pnnx_19578 0 1 26198 value=2 prim::Constant pnnx_19579 0 1 26199 value=4 prim::Constant pnnx_19580 0 1 26200 value=5 prim::ListConstruct pnnx_19581 6 1 26195 26196 26197 26198 26199 26200 18170 Tensor.view Tensor.view_2054 2 1 18161 18162 windows.195 $input=18161 $shape=18162 #18161=(36,64,192)f32 #windows.195=(36,8,8,192)f32 Tensor.view Tensor.view_2055 2 1 windows.195 18168 x2.123 $input=windows.195 $shape=18168 #windows.195=(36,8,8,192)f32 #x2.123=(1,6,6,8,8,192)f32 prim::Constant pnnx_19585 0 1 26202 value=1 prim::Constant pnnx_19586 0 1 26203 value=-1 prim::ListConstruct pnnx_19587 4 1 26202 1612 1852 26203 18173 torch.permute torch.permute_2915 2 1 x2.123 18170 18171 $input=x2.123 $dims=18170 #x2.123=(1,6,6,8,8,192)f32 #18171=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_292 1 1 18171 18172 memory_format=torch.contiguous_format $input=18171 #18171=(1,6,8,6,8,192)f32 #18172=(1,6,8,6,8,192)f32 aten::mul pnnx_19589 2 1 H1.1 W1.1 18175 aten::Int pnnx_19590 1 1 18175 18176 prim::ListConstruct pnnx_19591 3 1 18065 18176 18069 18177 prim::Constant pnnx_19593 0 1 18179 value=None prim::Constant pnnx_19594 0 1 26204 value=1 Tensor.view Tensor.view_2056 2 1 18172 18173 x3.123 $input=18172 $shape=18173 #18172=(1,6,8,6,8,192)f32 #x3.123=(1,48,48,192)f32 Tensor.view Tensor.view_2057 2 1 x3.123 18177 x4.123 $input=x3.123 $shape=18177 #x3.123=(1,48,48,192)f32 #x4.123=(1,2304,192)f32 aten::add pnnx_19595 3 1 18029 x4.123 26204 input.439 #18029=(1,2304,192)f32 #x4.123=(1,2304,192)f32 #input.439=(1,2304,192)f32 nn.LayerNorm layers_mmsa.4.residual_group.blocks.0.norm2 1 1 input.439 18181 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.439=(1,2304,192)f32 #18181=(1,2304,192)f32 nn.Linear layers_mmsa.4.residual_group.blocks.0.mlp.fc1 1 1 18181 18186 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #18181=(1,2304,192)f32 #18186=(1,2304,384)f32 nn.GELU layers_mmsa.4.residual_group.blocks.0.mlp.act 1 1 18186 18187 #18186=(1,2304,384)f32 #18187=(1,2304,384)f32 nn.Dropout layers_mmsa.4.residual_group.blocks.0.mlp.drop 1 1 18187 18188 #18187=(1,2304,384)f32 #18188=(1,2304,384)f32 nn.Linear layers_mmsa.4.residual_group.blocks.0.mlp.fc2 1 1 18188 18189 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #18188=(1,2304,384)f32 #18189=(1,2304,192)f32 nn.Dropout layers_mmsa.4.residual_group.blocks.0.mlp.drop 1 1 18189 18190 #18189=(1,2304,192)f32 #18190=(1,2304,192)f32 prim::Constant pnnx_19596 0 1 18191 value=None prim::Constant pnnx_19597 0 1 26205 value=1 aten::add pnnx_19598 3 1 input.439 18190 26205 18192 #input.439=(1,2304,192)f32 #18190=(1,2304,192)f32 #18192=(1,2304,192)f32 prim::Constant pnnx_19599 0 1 18193 value=trunc prim::Constant pnnx_19600 0 1 18194 value=8 prim::Constant pnnx_19601 0 1 18195 value=0 prim::Constant pnnx_19602 0 1 18196 value=2 prim::Constant pnnx_19603 0 1 18197 value=-4 prim::Constant pnnx_19604 0 1 18198 value=1 prim::Constant 
pnnx_19605 0 1 18199 value=3 prim::Constant pnnx_19606 0 1 18200 value=8 prim::Constant pnnx_19607 0 1 18201 value=4 prim::Constant pnnx_19608 0 1 18202 value=5 prim::Constant pnnx_19609 0 1 18203 value=-1 prim::Constant pnnx_19610 0 1 18204 value=64 pnnx.Attribute layers_mmsa.4.residual_group.blocks.1 0 1 attn_mask.99 @attn_mask=(36,64,64)f32 #attn_mask.99=(36,64,64)f32 aten::size pnnx_19611 2 1 18192 18195 18211 #18192=(1,2304,192)f32 prim::NumToTensor pnnx_19612 1 1 18211 B.235 aten::Int pnnx_19613 1 1 B.235 18213 aten::Int pnnx_19614 1 1 B.235 18214 aten::size pnnx_19615 2 1 18192 18196 18215 #18192=(1,2304,192)f32 prim::NumToTensor pnnx_19616 1 1 18215 C.399 aten::Int pnnx_19617 1 1 C.399 18217 aten::Int pnnx_19618 1 1 C.399 18218 aten::Int pnnx_19619 1 1 C.399 18219 aten::Int pnnx_19620 1 1 C.399 18220 nn.LayerNorm layers_mmsa.4.residual_group.blocks.1.norm1 1 1 18192 18221 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #18192=(1,2304,192)f32 #18221=(1,2304,192)f32 prim::ListConstruct pnnx_19621 4 1 18214 1609 1849 18220 18222 prim::Constant pnnx_19623 0 1 26206 value=-4 prim::ListConstruct pnnx_19624 2 1 18197 26206 18224 prim::Constant pnnx_19625 0 1 26207 value=2 prim::ListConstruct pnnx_19626 2 1 18198 26207 18225 Tensor.view Tensor.view_2058 2 1 18221 18222 x.197 $input=18221 $shape=18222 #18221=(1,2304,192)f32 #x.197=(1,48,48,192)f32 prim::Constant pnnx_19628 0 1 26208 value=0 torch.roll torch.roll_2516 3 1 x.197 18224 18225 x0.125 $input=x.197 $shifts=18224 $dims=18225 #x.197=(1,48,48,192)f32 #x0.125=(1,48,48,192)f32 aten::size pnnx_19629 2 1 x0.125 26208 18227 #x0.125=(1,48,48,192)f32 prim::NumToTensor pnnx_19630 1 1 18227 B0.125 aten::Int pnnx_19631 1 1 B0.125 18229 prim::Constant pnnx_19632 0 1 26209 value=1 aten::size pnnx_19633 2 1 x0.125 26209 18230 #x0.125=(1,48,48,192)f32 prim::NumToTensor pnnx_19634 1 1 18230 18231 prim::Constant pnnx_19635 0 1 26210 value=2 aten::size pnnx_19636 2 1 x0.125 26210 18232 #x0.125=(1,48,48,192)f32 prim::NumToTensor pnnx_19637 1 1 18232 18233 aten::size pnnx_19638 2 1 x0.125 18199 18234 #x0.125=(1,48,48,192)f32 prim::NumToTensor pnnx_19639 1 1 18234 C0.125 aten::Int pnnx_19640 1 1 C0.125 18236 aten::Int pnnx_19641 1 1 C0.125 18237 aten::div pnnx_19642 3 1 18231 18194 18193 18238 aten::Int pnnx_19643 1 1 18238 18239 prim::Constant pnnx_19644 0 1 26211 value=8 prim::Constant pnnx_19645 0 1 26212 value=trunc aten::div pnnx_19646 3 1 18233 26211 26212 18240 aten::Int pnnx_19647 1 1 18240 18241 prim::Constant pnnx_19648 0 1 26213 value=8 prim::ListConstruct pnnx_19649 6 1 18229 18239 18200 18241 26213 18237 18242 prim::Constant pnnx_19651 0 1 26214 value=0 prim::Constant pnnx_19652 0 1 26215 value=1 prim::Constant pnnx_19653 0 1 26216 value=3 prim::Constant pnnx_19654 0 1 26217 value=2 prim::ListConstruct pnnx_19655 6 1 26214 26215 26216 26217 18201 18202 18244 Tensor.view Tensor.view_2059 2 1 x0.125 18242 x1.125 $input=x0.125 $shape=18242 #x0.125=(1,48,48,192)f32 #x1.125=(1,6,8,6,8,192)f32 prim::Constant pnnx_19659 0 1 26219 value=8 prim::Constant pnnx_19660 0 1 26220 value=8 prim::ListConstruct pnnx_19661 4 1 18203 26219 26220 18236 18247 torch.permute torch.permute_2916 2 1 x1.125 18244 18245 $input=x1.125 $dims=18244 #x1.125=(1,6,8,6,8,192)f32 #18245=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_293 1 1 18245 18246 memory_format=torch.contiguous_format $input=18245 #18245=(1,6,6,8,8,192)f32 #18246=(1,6,6,8,8,192)f32 prim::Constant pnnx_19663 0 1 26221 value=-1 prim::ListConstruct 
pnnx_19664 3 1 26221 18204 18219 18249 prim::Constant pnnx_19666 0 1 18251 value=1.767767e-01 prim::Constant pnnx_19667 0 1 18252 value=trunc prim::Constant pnnx_19668 0 1 18253 value=6 prim::Constant pnnx_19669 0 1 18254 value=0 prim::Constant pnnx_19670 0 1 18255 value=1 prim::Constant pnnx_19671 0 1 18256 value=2 prim::Constant pnnx_19672 0 1 18257 value=3 prim::Constant pnnx_19673 0 1 18258 value=6 prim::Constant pnnx_19674 0 1 18259 value=4 prim::Constant pnnx_19675 0 1 18260 value=-2 prim::Constant pnnx_19676 0 1 18261 value=-1 prim::Constant pnnx_19677 0 1 18262 value=64 pnnx.Attribute layers_mmsa.4.residual_group.blocks.1.attn 0 1 relative_position_bias_table.197 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.197=(225,6)f32 pnnx.Attribute layers_mmsa.4.residual_group.blocks.1.attn 0 1 relative_position_index.197 @relative_position_index=(64,64)i64 #relative_position_index.197=(64,64)i64 Tensor.view Tensor.view_2060 2 1 18246 18247 x_windows.197 $input=18246 $shape=18247 #18246=(1,6,6,8,8,192)f32 #x_windows.197=(36,8,8,192)f32 Tensor.view Tensor.view_2061 2 1 x_windows.197 18249 x2.125 $input=x_windows.197 $shape=18249 #x_windows.197=(36,8,8,192)f32 #x2.125=(36,64,192)f32 aten::size pnnx_19678 2 1 x2.125 18254 18270 #x2.125=(36,64,192)f32 prim::NumToTensor pnnx_19679 1 1 18270 B_.197 aten::Int pnnx_19680 1 1 B_.197 18272 aten::Int pnnx_19681 1 1 B_.197 18273 aten::size pnnx_19682 2 1 x2.125 18255 18274 #x2.125=(36,64,192)f32 prim::NumToTensor pnnx_19683 1 1 18274 N.197 aten::Int pnnx_19684 1 1 N.197 18276 aten::Int pnnx_19685 1 1 N.197 18277 aten::Int pnnx_19686 1 1 N.197 18278 aten::Int pnnx_19687 1 1 N.197 18279 aten::Int pnnx_19688 1 1 N.197 18280 aten::Int pnnx_19689 1 1 N.197 18281 aten::size pnnx_19690 2 1 x2.125 18256 18282 #x2.125=(36,64,192)f32 prim::NumToTensor pnnx_19691 1 1 18282 C.401 aten::Int pnnx_19692 1 1 C.401 18284 nn.Linear layers_mmsa.4.residual_group.blocks.1.attn.qkv 1 1 x2.125 18285 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.125=(36,64,192)f32 #18285=(36,64,576)f32 aten::div pnnx_19693 3 1 C.401 18253 18252 18286 aten::Int pnnx_19694 1 1 18286 18287 prim::ListConstruct pnnx_19695 5 1 18273 18281 18257 18258 18287 18288 prim::Constant pnnx_19697 0 1 26222 value=2 prim::Constant pnnx_19698 0 1 26223 value=0 prim::Constant pnnx_19699 0 1 26224 value=3 prim::Constant pnnx_19700 0 1 26225 value=1 prim::ListConstruct pnnx_19701 5 1 26222 26223 26224 26225 18259 18290 Tensor.reshape Tensor.reshape_628 2 1 18285 18288 18289 $input=18285 $shape=18288 #18285=(36,64,576)f32 #18289=(36,64,3,6,32)f32 prim::Constant pnnx_19703 0 1 26226 value=0 prim::Constant pnnx_19704 0 1 26227 value=0 prim::Constant pnnx_19706 0 1 26228 value=0 prim::Constant pnnx_19707 0 1 26229 value=1 prim::Constant pnnx_19709 0 1 26230 value=0 prim::Constant pnnx_19710 0 1 26231 value=2 torch.permute torch.permute_2917 2 1 18289 18290 qkv0.125 $input=18289 $dims=18290 #18289=(36,64,3,6,32)f32 #qkv0.125=(3,36,6,64,32)f32 Tensor.select Tensor.select_941 3 1 qkv0.125 26226 26227 q.197 $input=qkv0.125 $dim=26226 $index=26227 #qkv0.125=(3,36,6,64,32)f32 #q.197=(36,6,64,32)f32 aten::mul pnnx_19712 2 1 q.197 18251 q0.125 #q.197=(36,6,64,32)f32 #q0.125=(36,6,64,32)f32 Tensor.select Tensor.select_942 3 1 qkv0.125 26228 26229 k.197 $input=qkv0.125 $dim=26228 $index=26229 #qkv0.125=(3,36,6,64,32)f32 #k.197=(36,6,64,32)f32 prim::Constant pnnx_19715 0 1 26232 value=-1 prim::ListConstruct pnnx_19716 1 1 26232 18298 Tensor.view Tensor.view_2062 2 1 
relative_position_index.197 18298 18299 $input=relative_position_index.197 $shape=18298 #relative_position_index.197=(64,64)i64 #18299=(4096)i64 prim::ListConstruct pnnx_19718 1 1 18299 18300 #18299=(4096)i64 prim::Constant pnnx_19720 0 1 26233 value=64 prim::Constant pnnx_19721 0 1 26234 value=-1 prim::ListConstruct pnnx_19722 3 1 18262 26233 26234 18302 Tensor.index Tensor.index_423 2 1 relative_position_bias_table.197 18300 18301 $input=relative_position_bias_table.197 $expr=18300 #relative_position_bias_table.197=(225,6)f32 #18301=(4096,6)f32 prim::Constant pnnx_19724 0 1 26235 value=2 prim::Constant pnnx_19725 0 1 26236 value=0 prim::Constant pnnx_19726 0 1 26237 value=1 prim::ListConstruct pnnx_19727 3 1 26235 26236 26237 18304 Tensor.view Tensor.view_2063 2 1 18301 18302 relative_position_bias.197 $input=18301 $shape=18302 #18301=(4096,6)f32 #relative_position_bias.197=(64,64,6)f32 prim::Constant pnnx_19731 0 1 26239 value=0 torch.permute torch.permute_2918 2 1 relative_position_bias.197 18304 18305 $input=relative_position_bias.197 $dims=18304 #relative_position_bias.197=(64,64,6)f32 #18305=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_294 1 1 18305 relative_position_bias0.125 memory_format=torch.contiguous_format $input=18305 #18305=(6,64,64)f32 #relative_position_bias0.125=(6,64,64)f32 prim::Constant pnnx_19733 0 1 26240 value=1 torch.transpose torch.transpose_3191 3 1 k.197 18260 18261 18296 $input=k.197 $dim0=18260 $dim1=18261 #k.197=(36,6,64,32)f32 #18296=(36,6,32,64)f32 torch.matmul torch.matmul_2398 2 1 q0.125 18296 attn.395 $input=q0.125 $other=18296 #q0.125=(36,6,64,32)f32 #18296=(36,6,32,64)f32 #attn.395=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3411 2 1 relative_position_bias0.125 26239 18307 $input=relative_position_bias0.125 $dim=26239 #relative_position_bias0.125=(6,64,64)f32 #18307=(1,6,64,64)f32 aten::add pnnx_19734 3 1 attn.395 18307 26240 attn0.63 #attn.395=(36,6,64,64)f32 #18307=(1,6,64,64)f32 #attn0.63=(36,6,64,64)f32 prim::Constant pnnx_19735 0 1 26241 value=0 aten::size pnnx_19736 2 1 attn_mask.99 26241 18309 #attn_mask.99=(36,64,64)f32 prim::NumToTensor pnnx_19737 1 1 18309 other.99 aten::Int pnnx_19738 1 1 other.99 18311 prim::Constant pnnx_19739 0 1 26242 value=trunc aten::div pnnx_19740 3 1 B_.197 other.99 26242 18312 aten::Int pnnx_19741 1 1 18312 18313 prim::Constant pnnx_19742 0 1 26243 value=6 prim::ListConstruct pnnx_19743 5 1 18313 18311 26243 18280 18279 18314 prim::Constant pnnx_19745 0 1 26244 value=1 prim::Constant pnnx_19747 0 1 26245 value=0 prim::Constant pnnx_19749 0 1 26246 value=1 Tensor.view Tensor.view_2064 2 1 attn0.63 18314 18315 $input=attn0.63 $shape=18314 #attn0.63=(36,6,64,64)f32 #18315=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3412 2 1 attn_mask.99 26244 18316 $input=attn_mask.99 $dim=26244 #attn_mask.99=(36,64,64)f32 #18316=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3413 2 1 18316 26245 18317 $input=18316 $dim=26245 #18316=(36,1,64,64)f32 #18317=(1,36,1,64,64)f32 aten::add pnnx_19750 3 1 18315 18317 26246 attn1.63 #18315=(1,36,6,64,64)f32 #18317=(1,36,1,64,64)f32 #attn1.63=(1,36,6,64,64)f32 prim::Constant pnnx_19751 0 1 26247 value=-1 prim::Constant pnnx_19752 0 1 26248 value=6 prim::ListConstruct pnnx_19753 4 1 26247 26248 18278 18277 18319 Tensor.view Tensor.view_2065 2 1 attn1.63 18319 input.441 $input=attn1.63 $shape=18319 #attn1.63=(1,36,6,64,64)f32 #input.441=(36,6,64,64)f32 nn.Softmax layers_mmsa.4.residual_group.blocks.1.attn.softmax 1 1 input.441 18321 dim=-1 #input.441=(36,6,64,64)f32 
#18321=(36,6,64,64)f32 nn.Dropout layers_mmsa.4.residual_group.blocks.1.attn.attn_drop 1 1 18321 18322 #18321=(36,6,64,64)f32 #18322=(36,6,64,64)f32 Tensor.select Tensor.select_943 3 1 qkv0.125 26230 26231 v.197 $input=qkv0.125 $dim=26230 $index=26231 #qkv0.125=(3,36,6,64,32)f32 #v.197=(36,6,64,32)f32 prim::Constant pnnx_19756 0 1 26249 value=1 prim::Constant pnnx_19757 0 1 26250 value=2 torch.matmul torch.matmul_2399 2 1 18322 v.197 18323 $input=18322 $other=v.197 #18322=(36,6,64,64)f32 #v.197=(36,6,64,32)f32 #18323=(36,6,64,32)f32 prim::ListConstruct pnnx_19759 3 1 18272 18276 18284 18325 torch.transpose torch.transpose_3192 3 1 18323 26249 26250 18324 $input=18323 $dim0=26249 $dim1=26250 #18323=(36,6,64,32)f32 #18324=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_629 2 1 18324 18325 input0.129 $input=18324 $shape=18325 #18324=(36,64,6,32)f32 #input0.129=(36,64,192)f32 nn.Linear layers_mmsa.4.residual_group.blocks.1.attn.proj 1 1 input0.129 18327 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.129=(36,64,192)f32 #18327=(36,64,192)f32 nn.Dropout layers_mmsa.4.residual_group.blocks.1.attn.proj_drop 1 1 18327 18328 #18327=(36,64,192)f32 #18328=(36,64,192)f32 prim::Constant pnnx_19761 0 1 26251 value=-1 prim::Constant pnnx_19762 0 1 26252 value=8 prim::Constant pnnx_19763 0 1 26253 value=8 prim::ListConstruct pnnx_19764 4 1 26251 26252 26253 18218 18329 prim::Constant pnnx_19766 0 1 26254 value=8 prim::Constant pnnx_19767 0 1 26255 value=trunc aten::div pnnx_19768 3 1 H1.1 26254 26255 18331 aten::Int pnnx_19769 1 1 18331 18332 prim::Constant pnnx_19770 0 1 26256 value=8 prim::Constant pnnx_19771 0 1 26257 value=trunc aten::div pnnx_19772 3 1 W1.1 26256 26257 18333 aten::Int pnnx_19773 1 1 18333 18334 prim::Constant pnnx_19774 0 1 26258 value=1 prim::Constant pnnx_19775 0 1 26259 value=8 prim::Constant pnnx_19776 0 1 26260 value=8 prim::Constant pnnx_19777 0 1 26261 value=-1 prim::ListConstruct pnnx_19778 6 1 26258 18332 18334 26259 26260 26261 18335 prim::Constant pnnx_19780 0 1 26262 value=0 prim::Constant pnnx_19781 0 1 26263 value=1 prim::Constant pnnx_19782 0 1 26264 value=3 prim::Constant pnnx_19783 0 1 26265 value=2 prim::Constant pnnx_19784 0 1 26266 value=4 prim::Constant pnnx_19785 0 1 26267 value=5 prim::ListConstruct pnnx_19786 6 1 26262 26263 26264 26265 26266 26267 18337 Tensor.view Tensor.view_2066 2 1 18328 18329 windows.197 $input=18328 $shape=18329 #18328=(36,64,192)f32 #windows.197=(36,8,8,192)f32 Tensor.view Tensor.view_2067 2 1 windows.197 18335 x3.125 $input=windows.197 $shape=18335 #windows.197=(36,8,8,192)f32 #x3.125=(1,6,6,8,8,192)f32 prim::Constant pnnx_19790 0 1 26269 value=1 prim::Constant pnnx_19791 0 1 26270 value=-1 prim::ListConstruct pnnx_19792 4 1 26269 1606 1846 26270 18340 torch.permute torch.permute_2919 2 1 x3.125 18337 18338 $input=x3.125 $dims=18337 #x3.125=(1,6,6,8,8,192)f32 #18338=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_295 1 1 18338 18339 memory_format=torch.contiguous_format $input=18338 #18338=(1,6,8,6,8,192)f32 #18339=(1,6,8,6,8,192)f32 prim::Constant pnnx_19794 0 1 26271 value=4 prim::Constant pnnx_19795 0 1 26272 value=4 prim::ListConstruct pnnx_19796 2 1 26271 26272 18342 prim::Constant pnnx_19797 0 1 26273 value=1 prim::Constant pnnx_19798 0 1 26274 value=2 prim::ListConstruct pnnx_19799 2 1 26273 26274 18343 Tensor.view Tensor.view_2068 2 1 18339 18340 shifted_x.99 $input=18339 $shape=18340 #18339=(1,6,8,6,8,192)f32 #shifted_x.99=(1,48,48,192)f32 aten::mul pnnx_19801 2 1 H1.1 W1.1 18345 
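layers_mmsa.4.residual_group.blocks.1 is one of the shifted blocks: torch.roll_2516 above moves the 48x48 token grid by (-4,-4) before window partition, and torch.roll_2517 just below rolls it back by (4,4) afterwards, i.e. shift_size = window_size / 2 = 4. A small sketch of that cyclic shift, assuming the standard shifted-window scheme; x is an illustrative stand-in for the feature map:

import torch

shift_size = 4
x = torch.randn(1, 48, 48, 192)                                               # (B, H, W, C) token grid
shifted = torch.roll(x, shifts=(-shift_size, -shift_size), dims=(1, 2))       # as in torch.roll_2516
# ... windowed attention runs on `shifted` ...
restored = torch.roll(shifted, shifts=(shift_size, shift_size), dims=(1, 2))  # as in torch.roll_2517
assert torch.equal(restored, x)                                               # the two rolls cancel exactly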
aten::Int pnnx_19802 1 1 18345 18346 prim::ListConstruct pnnx_19803 3 1 18213 18346 18217 18347 prim::Constant pnnx_19805 0 1 18349 value=None prim::Constant pnnx_19806 0 1 26275 value=1 torch.roll torch.roll_2517 3 1 shifted_x.99 18342 18343 x4.125 $input=shifted_x.99 $shifts=18342 $dims=18343 #shifted_x.99=(1,48,48,192)f32 #x4.125=(1,48,48,192)f32 Tensor.view Tensor.view_2069 2 1 x4.125 18347 x5.99 $input=x4.125 $shape=18347 #x4.125=(1,48,48,192)f32 #x5.99=(1,2304,192)f32 aten::add pnnx_19807 3 1 18192 x5.99 26275 input.443 #18192=(1,2304,192)f32 #x5.99=(1,2304,192)f32 #input.443=(1,2304,192)f32 nn.LayerNorm layers_mmsa.4.residual_group.blocks.1.norm2 1 1 input.443 18351 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.443=(1,2304,192)f32 #18351=(1,2304,192)f32 nn.Linear layers_mmsa.4.residual_group.blocks.1.mlp.fc1 1 1 18351 18356 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #18351=(1,2304,192)f32 #18356=(1,2304,384)f32 nn.GELU layers_mmsa.4.residual_group.blocks.1.mlp.act 1 1 18356 18357 #18356=(1,2304,384)f32 #18357=(1,2304,384)f32 nn.Dropout layers_mmsa.4.residual_group.blocks.1.mlp.drop 1 1 18357 18358 #18357=(1,2304,384)f32 #18358=(1,2304,384)f32 nn.Linear layers_mmsa.4.residual_group.blocks.1.mlp.fc2 1 1 18358 18359 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #18358=(1,2304,384)f32 #18359=(1,2304,192)f32 nn.Dropout layers_mmsa.4.residual_group.blocks.1.mlp.drop 1 1 18359 18360 #18359=(1,2304,192)f32 #18360=(1,2304,192)f32 prim::Constant pnnx_19808 0 1 18361 value=None prim::Constant pnnx_19809 0 1 26276 value=1 aten::add pnnx_19810 3 1 input.443 18360 26276 18362 #input.443=(1,2304,192)f32 #18360=(1,2304,192)f32 #18362=(1,2304,192)f32 prim::Constant pnnx_19811 0 1 18363 value=trunc prim::Constant pnnx_19812 0 1 18364 value=8 prim::Constant pnnx_19813 0 1 18365 value=0 prim::Constant pnnx_19814 0 1 18366 value=2 prim::Constant pnnx_19815 0 1 18367 value=1 prim::Constant pnnx_19816 0 1 18368 value=3 prim::Constant pnnx_19817 0 1 18369 value=8 prim::Constant pnnx_19818 0 1 18370 value=4 prim::Constant pnnx_19819 0 1 18371 value=5 prim::Constant pnnx_19820 0 1 18372 value=-1 prim::Constant pnnx_19821 0 1 18373 value=64 aten::size pnnx_19822 2 1 18362 18365 18379 #18362=(1,2304,192)f32 prim::NumToTensor pnnx_19823 1 1 18379 B.237 aten::Int pnnx_19824 1 1 B.237 18381 aten::Int pnnx_19825 1 1 B.237 18382 aten::size pnnx_19826 2 1 18362 18366 18383 #18362=(1,2304,192)f32 prim::NumToTensor pnnx_19827 1 1 18383 C.403 aten::Int pnnx_19828 1 1 C.403 18385 aten::Int pnnx_19829 1 1 C.403 18386 aten::Int pnnx_19830 1 1 C.403 18387 aten::Int pnnx_19831 1 1 C.403 18388 nn.LayerNorm layers_mmsa.4.residual_group.blocks.2.norm1 1 1 18362 18389 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #18362=(1,2304,192)f32 #18389=(1,2304,192)f32 prim::ListConstruct pnnx_19832 4 1 18382 1603 1843 18388 18390 prim::Constant pnnx_19834 0 1 26277 value=0 Tensor.view Tensor.view_2070 2 1 18389 18390 x.199 $input=18389 $shape=18390 #18389=(1,2304,192)f32 #x.199=(1,48,48,192)f32 aten::size pnnx_19835 2 1 x.199 26277 18392 #x.199=(1,48,48,192)f32 prim::NumToTensor pnnx_19836 1 1 18392 B0.127 aten::Int pnnx_19837 1 1 B0.127 18394 aten::size pnnx_19838 2 1 x.199 18367 18395 #x.199=(1,48,48,192)f32 prim::NumToTensor pnnx_19839 1 1 18395 18396 prim::Constant pnnx_19840 0 1 26278 value=2 aten::size pnnx_19841 2 1 x.199 26278 18397 #x.199=(1,48,48,192)f32 
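Each block ends with the same MLP tail seen here for blocks.1: LayerNorm, Linear(192,384), GELU, Dropout, Linear(384,192), Dropout, added back residually by the following aten::add with alpha 1. A compact stand-in for that pattern; the class name and the drop rate are assumptions:

import torch
import torch.nn as nn

class BlockMlp(nn.Module):
    # LayerNorm -> fc1 (192->384) -> GELU -> Dropout -> fc2 (384->192) -> Dropout, residual add
    def __init__(self, dim=192, hidden=384, drop=0.0):
        super().__init__()
        self.norm = nn.LayerNorm(dim, eps=1e-5)
        self.fc1 = nn.Linear(dim, hidden)
        self.act = nn.GELU()
        self.fc2 = nn.Linear(hidden, dim)
        self.drop = nn.Dropout(drop)

    def forward(self, x):
        # x: (B, L, C), e.g. (1, 2304, 192) for the 48x48 token grid in this section
        return x + self.drop(self.fc2(self.drop(self.act(self.fc1(self.norm(x))))))

print(BlockMlp()(torch.randn(1, 2304, 192)).shape)   # torch.Size([1, 2304, 192])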
prim::NumToTensor pnnx_19842 1 1 18397 18398 aten::size pnnx_19843 2 1 x.199 18368 18399 #x.199=(1,48,48,192)f32 prim::NumToTensor pnnx_19844 1 1 18399 C0.127 aten::Int pnnx_19845 1 1 C0.127 18401 aten::Int pnnx_19846 1 1 C0.127 18402 aten::div pnnx_19847 3 1 18396 18364 18363 18403 aten::Int pnnx_19848 1 1 18403 18404 prim::Constant pnnx_19849 0 1 26279 value=8 prim::Constant pnnx_19850 0 1 26280 value=trunc aten::div pnnx_19851 3 1 18398 26279 26280 18405 aten::Int pnnx_19852 1 1 18405 18406 prim::Constant pnnx_19853 0 1 26281 value=8 prim::ListConstruct pnnx_19854 6 1 18394 18404 18369 18406 26281 18402 18407 prim::Constant pnnx_19856 0 1 26282 value=0 prim::Constant pnnx_19857 0 1 26283 value=1 prim::Constant pnnx_19858 0 1 26284 value=3 prim::Constant pnnx_19859 0 1 26285 value=2 prim::ListConstruct pnnx_19860 6 1 26282 26283 26284 26285 18370 18371 18409 Tensor.view Tensor.view_2071 2 1 x.199 18407 x0.127 $input=x.199 $shape=18407 #x.199=(1,48,48,192)f32 #x0.127=(1,6,8,6,8,192)f32 prim::Constant pnnx_19864 0 1 26287 value=8 prim::Constant pnnx_19865 0 1 26288 value=8 prim::ListConstruct pnnx_19866 4 1 18372 26287 26288 18401 18412 torch.permute torch.permute_2920 2 1 x0.127 18409 18410 $input=x0.127 $dims=18409 #x0.127=(1,6,8,6,8,192)f32 #18410=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_296 1 1 18410 18411 memory_format=torch.contiguous_format $input=18410 #18410=(1,6,6,8,8,192)f32 #18411=(1,6,6,8,8,192)f32 prim::Constant pnnx_19868 0 1 26289 value=-1 prim::ListConstruct pnnx_19869 3 1 26289 18373 18387 18414 prim::Constant pnnx_19871 0 1 18416 value=1.767767e-01 prim::Constant pnnx_19872 0 1 18417 value=trunc prim::Constant pnnx_19873 0 1 18418 value=6 prim::Constant pnnx_19874 0 1 18419 value=0 prim::Constant pnnx_19875 0 1 18420 value=1 prim::Constant pnnx_19876 0 1 18421 value=2 prim::Constant pnnx_19877 0 1 18422 value=3 prim::Constant pnnx_19878 0 1 18423 value=6 prim::Constant pnnx_19879 0 1 18424 value=4 prim::Constant pnnx_19880 0 1 18425 value=-2 prim::Constant pnnx_19881 0 1 18426 value=-1 prim::Constant pnnx_19882 0 1 18427 value=64 pnnx.Attribute layers_mmsa.4.residual_group.blocks.2.attn 0 1 relative_position_bias_table.199 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.199=(225,6)f32 pnnx.Attribute layers_mmsa.4.residual_group.blocks.2.attn 0 1 relative_position_index.199 @relative_position_index=(64,64)i64 #relative_position_index.199=(64,64)i64 Tensor.view Tensor.view_2072 2 1 18411 18412 x_windows.199 $input=18411 $shape=18412 #18411=(1,6,6,8,8,192)f32 #x_windows.199=(36,8,8,192)f32 Tensor.view Tensor.view_2073 2 1 x_windows.199 18414 x1.127 $input=x_windows.199 $shape=18414 #x_windows.199=(36,8,8,192)f32 #x1.127=(36,64,192)f32 aten::size pnnx_19883 2 1 x1.127 18419 18435 #x1.127=(36,64,192)f32 prim::NumToTensor pnnx_19884 1 1 18435 B_.199 aten::Int pnnx_19885 1 1 B_.199 18437 aten::Int pnnx_19886 1 1 B_.199 18438 aten::size pnnx_19887 2 1 x1.127 18420 18439 #x1.127=(36,64,192)f32 prim::NumToTensor pnnx_19888 1 1 18439 N.199 aten::Int pnnx_19889 1 1 N.199 18441 aten::Int pnnx_19890 1 1 N.199 18442 aten::size pnnx_19891 2 1 x1.127 18421 18443 #x1.127=(36,64,192)f32 prim::NumToTensor pnnx_19892 1 1 18443 C.405 aten::Int pnnx_19893 1 1 C.405 18445 nn.Linear layers_mmsa.4.residual_group.blocks.2.attn.qkv 1 1 x1.127 18446 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.127=(36,64,192)f32 #18446=(36,64,576)f32 aten::div pnnx_19894 3 1 C.405 18418 18417 18447 aten::Int pnnx_19895 1 1 18447 18448 
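The attention entry point repeated in every block is visible again here for blocks.2: a single nn.Linear(192, 576) produces q, k and v, which Tensor.reshape_630 / torch.permute_2921 below split into (3, windows, heads, tokens, head_dim) before the three Tensor.select ops. A sketch of that split with the shapes from this graph; variable names are illustrative:

import torch
import torch.nn as nn

num_heads, dim = 6, 192
head_dim = dim // num_heads                    # 32
qkv_proj = nn.Linear(dim, dim * 3, bias=True)  # the ...attn.qkv Linear (192 -> 576)

x = torch.randn(36, 64, dim)                   # (num_windows*B, N, C) window tokens
B_, N, C = x.shape
qkv = qkv_proj(x).reshape(B_, N, 3, num_heads, C // num_heads)   # (36, 64, 3, 6, 32)
qkv = qkv.permute(2, 0, 3, 1, 4)                                 # (3, 36, 6, 64, 32)
q, k, v = qkv[0], qkv[1], qkv[2]                                 # the Tensor.select ops
attn = (q * head_dim ** -0.5) @ k.transpose(-2, -1)              # (36, 6, 64, 64) logits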
prim::ListConstruct pnnx_19896 5 1 18438 18442 18422 18423 18448 18449 prim::Constant pnnx_19898 0 1 26290 value=2 prim::Constant pnnx_19899 0 1 26291 value=0 prim::Constant pnnx_19900 0 1 26292 value=3 prim::Constant pnnx_19901 0 1 26293 value=1 prim::ListConstruct pnnx_19902 5 1 26290 26291 26292 26293 18424 18451 Tensor.reshape Tensor.reshape_630 2 1 18446 18449 18450 $input=18446 $shape=18449 #18446=(36,64,576)f32 #18450=(36,64,3,6,32)f32 prim::Constant pnnx_19904 0 1 26294 value=0 prim::Constant pnnx_19905 0 1 26295 value=0 prim::Constant pnnx_19907 0 1 26296 value=0 prim::Constant pnnx_19908 0 1 26297 value=1 prim::Constant pnnx_19910 0 1 26298 value=0 prim::Constant pnnx_19911 0 1 26299 value=2 torch.permute torch.permute_2921 2 1 18450 18451 qkv0.127 $input=18450 $dims=18451 #18450=(36,64,3,6,32)f32 #qkv0.127=(3,36,6,64,32)f32 Tensor.select Tensor.select_944 3 1 qkv0.127 26294 26295 q.199 $input=qkv0.127 $dim=26294 $index=26295 #qkv0.127=(3,36,6,64,32)f32 #q.199=(36,6,64,32)f32 aten::mul pnnx_19913 2 1 q.199 18416 q0.127 #q.199=(36,6,64,32)f32 #q0.127=(36,6,64,32)f32 Tensor.select Tensor.select_945 3 1 qkv0.127 26296 26297 k.199 $input=qkv0.127 $dim=26296 $index=26297 #qkv0.127=(3,36,6,64,32)f32 #k.199=(36,6,64,32)f32 prim::Constant pnnx_19916 0 1 26300 value=-1 prim::ListConstruct pnnx_19917 1 1 26300 18459 Tensor.view Tensor.view_2074 2 1 relative_position_index.199 18459 18460 $input=relative_position_index.199 $shape=18459 #relative_position_index.199=(64,64)i64 #18460=(4096)i64 prim::ListConstruct pnnx_19919 1 1 18460 18461 #18460=(4096)i64 prim::Constant pnnx_19921 0 1 26301 value=64 prim::Constant pnnx_19922 0 1 26302 value=-1 prim::ListConstruct pnnx_19923 3 1 18427 26301 26302 18463 Tensor.index Tensor.index_424 2 1 relative_position_bias_table.199 18461 18462 $input=relative_position_bias_table.199 $expr=18461 #relative_position_bias_table.199=(225,6)f32 #18462=(4096,6)f32 prim::Constant pnnx_19925 0 1 26303 value=2 prim::Constant pnnx_19926 0 1 26304 value=0 prim::Constant pnnx_19927 0 1 26305 value=1 prim::ListConstruct pnnx_19928 3 1 26303 26304 26305 18465 Tensor.view Tensor.view_2075 2 1 18462 18463 relative_position_bias.199 $input=18462 $shape=18463 #18462=(4096,6)f32 #relative_position_bias.199=(64,64,6)f32 prim::Constant pnnx_19932 0 1 26307 value=0 torch.permute torch.permute_2922 2 1 relative_position_bias.199 18465 18466 $input=relative_position_bias.199 $dims=18465 #relative_position_bias.199=(64,64,6)f32 #18466=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_297 1 1 18466 relative_position_bias0.127 memory_format=torch.contiguous_format $input=18466 #18466=(6,64,64)f32 #relative_position_bias0.127=(6,64,64)f32 prim::Constant pnnx_19934 0 1 26308 value=1 torch.transpose torch.transpose_3193 3 1 k.199 18425 18426 18457 $input=k.199 $dim0=18425 $dim1=18426 #k.199=(36,6,64,32)f32 #18457=(36,6,32,64)f32 torch.matmul torch.matmul_2400 2 1 q0.127 18457 attn.399 $input=q0.127 $other=18457 #q0.127=(36,6,64,32)f32 #18457=(36,6,32,64)f32 #attn.399=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3414 2 1 relative_position_bias0.127 26307 18468 $input=relative_position_bias0.127 $dim=26307 #relative_position_bias0.127=(6,64,64)f32 #18468=(1,6,64,64)f32 aten::add pnnx_19935 3 1 attn.399 18468 26308 input.445 #attn.399=(36,6,64,64)f32 #18468=(1,6,64,64)f32 #input.445=(36,6,64,64)f32 nn.Softmax layers_mmsa.4.residual_group.blocks.2.attn.softmax 1 1 input.445 18470 dim=-1 #input.445=(36,6,64,64)f32 #18470=(36,6,64,64)f32 nn.Dropout 
layers_mmsa.4.residual_group.blocks.2.attn.attn_drop 1 1 18470 18471 #18470=(36,6,64,64)f32 #18471=(36,6,64,64)f32 Tensor.select Tensor.select_946 3 1 qkv0.127 26298 26299 v.199 $input=qkv0.127 $dim=26298 $index=26299 #qkv0.127=(3,36,6,64,32)f32 #v.199=(36,6,64,32)f32 prim::Constant pnnx_19937 0 1 26309 value=1 prim::Constant pnnx_19938 0 1 26310 value=2 torch.matmul torch.matmul_2401 2 1 18471 v.199 18472 $input=18471 $other=v.199 #18471=(36,6,64,64)f32 #v.199=(36,6,64,32)f32 #18472=(36,6,64,32)f32 prim::ListConstruct pnnx_19940 3 1 18437 18441 18445 18474 torch.transpose torch.transpose_3194 3 1 18472 26309 26310 18473 $input=18472 $dim0=26309 $dim1=26310 #18472=(36,6,64,32)f32 #18473=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_631 2 1 18473 18474 input0.131 $input=18473 $shape=18474 #18473=(36,64,6,32)f32 #input0.131=(36,64,192)f32 nn.Linear layers_mmsa.4.residual_group.blocks.2.attn.proj 1 1 input0.131 18476 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.131=(36,64,192)f32 #18476=(36,64,192)f32 nn.Dropout layers_mmsa.4.residual_group.blocks.2.attn.proj_drop 1 1 18476 18477 #18476=(36,64,192)f32 #18477=(36,64,192)f32 prim::Constant pnnx_19942 0 1 26311 value=-1 prim::Constant pnnx_19943 0 1 26312 value=8 prim::Constant pnnx_19944 0 1 26313 value=8 prim::ListConstruct pnnx_19945 4 1 26311 26312 26313 18386 18478 prim::Constant pnnx_19947 0 1 26314 value=8 prim::Constant pnnx_19948 0 1 26315 value=trunc aten::div pnnx_19949 3 1 H1.1 26314 26315 18480 aten::Int pnnx_19950 1 1 18480 18481 prim::Constant pnnx_19951 0 1 26316 value=8 prim::Constant pnnx_19952 0 1 26317 value=trunc aten::div pnnx_19953 3 1 W1.1 26316 26317 18482 aten::Int pnnx_19954 1 1 18482 18483 prim::Constant pnnx_19955 0 1 26318 value=1 prim::Constant pnnx_19956 0 1 26319 value=8 prim::Constant pnnx_19957 0 1 26320 value=8 prim::Constant pnnx_19958 0 1 26321 value=-1 prim::ListConstruct pnnx_19959 6 1 26318 18481 18483 26319 26320 26321 18484 prim::Constant pnnx_19961 0 1 26322 value=0 prim::Constant pnnx_19962 0 1 26323 value=1 prim::Constant pnnx_19963 0 1 26324 value=3 prim::Constant pnnx_19964 0 1 26325 value=2 prim::Constant pnnx_19965 0 1 26326 value=4 prim::Constant pnnx_19966 0 1 26327 value=5 prim::ListConstruct pnnx_19967 6 1 26322 26323 26324 26325 26326 26327 18486 Tensor.view Tensor.view_2076 2 1 18477 18478 windows.199 $input=18477 $shape=18478 #18477=(36,64,192)f32 #windows.199=(36,8,8,192)f32 Tensor.view Tensor.view_2077 2 1 windows.199 18484 x2.127 $input=windows.199 $shape=18484 #windows.199=(36,8,8,192)f32 #x2.127=(1,6,6,8,8,192)f32 prim::Constant pnnx_19971 0 1 26329 value=1 prim::Constant pnnx_19972 0 1 26330 value=-1 prim::ListConstruct pnnx_19973 4 1 26329 1600 1840 26330 18489 torch.permute torch.permute_2923 2 1 x2.127 18486 18487 $input=x2.127 $dims=18486 #x2.127=(1,6,6,8,8,192)f32 #18487=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_298 1 1 18487 18488 memory_format=torch.contiguous_format $input=18487 #18487=(1,6,8,6,8,192)f32 #18488=(1,6,8,6,8,192)f32 aten::mul pnnx_19975 2 1 H1.1 W1.1 18491 aten::Int pnnx_19976 1 1 18491 18492 prim::ListConstruct pnnx_19977 3 1 18381 18492 18385 18493 prim::Constant pnnx_19979 0 1 18495 value=None prim::Constant pnnx_19980 0 1 26331 value=1 Tensor.view Tensor.view_2078 2 1 18488 18489 x3.127 $input=18488 $shape=18489 #18488=(1,6,8,6,8,192)f32 #x3.127=(1,48,48,192)f32 Tensor.view Tensor.view_2079 2 1 x3.127 18493 x4.127 $input=x3.127 $shape=18493 #x3.127=(1,48,48,192)f32 #x4.127=(1,2304,192)f32 aten::add 
pnnx_19981 3 1 18362 x4.127 26331 input.447 #18362=(1,2304,192)f32 #x4.127=(1,2304,192)f32 #input.447=(1,2304,192)f32 nn.LayerNorm layers_mmsa.4.residual_group.blocks.2.norm2 1 1 input.447 18497 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.447=(1,2304,192)f32 #18497=(1,2304,192)f32 nn.Linear layers_mmsa.4.residual_group.blocks.2.mlp.fc1 1 1 18497 18502 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #18497=(1,2304,192)f32 #18502=(1,2304,384)f32 nn.GELU layers_mmsa.4.residual_group.blocks.2.mlp.act 1 1 18502 18503 #18502=(1,2304,384)f32 #18503=(1,2304,384)f32 nn.Dropout layers_mmsa.4.residual_group.blocks.2.mlp.drop 1 1 18503 18504 #18503=(1,2304,384)f32 #18504=(1,2304,384)f32 nn.Linear layers_mmsa.4.residual_group.blocks.2.mlp.fc2 1 1 18504 18505 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #18504=(1,2304,384)f32 #18505=(1,2304,192)f32 nn.Dropout layers_mmsa.4.residual_group.blocks.2.mlp.drop 1 1 18505 18506 #18505=(1,2304,192)f32 #18506=(1,2304,192)f32 prim::Constant pnnx_19982 0 1 18507 value=None prim::Constant pnnx_19983 0 1 26332 value=1 aten::add pnnx_19984 3 1 input.447 18506 26332 18508 #input.447=(1,2304,192)f32 #18506=(1,2304,192)f32 #18508=(1,2304,192)f32 prim::Constant pnnx_19985 0 1 18509 value=trunc prim::Constant pnnx_19986 0 1 18510 value=8 prim::Constant pnnx_19987 0 1 18511 value=0 prim::Constant pnnx_19988 0 1 18512 value=2 prim::Constant pnnx_19989 0 1 18513 value=-4 prim::Constant pnnx_19990 0 1 18514 value=1 prim::Constant pnnx_19991 0 1 18515 value=3 prim::Constant pnnx_19992 0 1 18516 value=8 prim::Constant pnnx_19993 0 1 18517 value=4 prim::Constant pnnx_19994 0 1 18518 value=5 prim::Constant pnnx_19995 0 1 18519 value=-1 prim::Constant pnnx_19996 0 1 18520 value=64 pnnx.Attribute layers_mmsa.4.residual_group.blocks.3 0 1 attn_mask.101 @attn_mask=(36,64,64)f32 #attn_mask.101=(36,64,64)f32 aten::size pnnx_19997 2 1 18508 18511 18527 #18508=(1,2304,192)f32 prim::NumToTensor pnnx_19998 1 1 18527 B.239 aten::Int pnnx_19999 1 1 B.239 18529 aten::Int pnnx_20000 1 1 B.239 18530 aten::size pnnx_20001 2 1 18508 18512 18531 #18508=(1,2304,192)f32 prim::NumToTensor pnnx_20002 1 1 18531 C.407 aten::Int pnnx_20003 1 1 C.407 18533 aten::Int pnnx_20004 1 1 C.407 18534 aten::Int pnnx_20005 1 1 C.407 18535 aten::Int pnnx_20006 1 1 C.407 18536 nn.LayerNorm layers_mmsa.4.residual_group.blocks.3.norm1 1 1 18508 18537 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #18508=(1,2304,192)f32 #18537=(1,2304,192)f32 prim::ListConstruct pnnx_20007 4 1 18530 1597 1837 18536 18538 prim::Constant pnnx_20009 0 1 26333 value=-4 prim::ListConstruct pnnx_20010 2 1 18513 26333 18540 prim::Constant pnnx_20011 0 1 26334 value=2 prim::ListConstruct pnnx_20012 2 1 18514 26334 18541 Tensor.view Tensor.view_2080 2 1 18537 18538 x.201 $input=18537 $shape=18538 #18537=(1,2304,192)f32 #x.201=(1,48,48,192)f32 prim::Constant pnnx_20014 0 1 26335 value=0 torch.roll torch.roll_2518 3 1 x.201 18540 18541 x0.129 $input=x.201 $shifts=18540 $dims=18541 #x.201=(1,48,48,192)f32 #x0.129=(1,48,48,192)f32 aten::size pnnx_20015 2 1 x0.129 26335 18543 #x0.129=(1,48,48,192)f32 prim::NumToTensor pnnx_20016 1 1 18543 B0.129 aten::Int pnnx_20017 1 1 B0.129 18545 prim::Constant pnnx_20018 0 1 26336 value=1 aten::size pnnx_20019 2 1 x0.129 26336 18546 #x0.129=(1,48,48,192)f32 prim::NumToTensor pnnx_20020 1 1 18546 18547 prim::Constant pnnx_20021 0 1 26337 
value=2 aten::size pnnx_20022 2 1 x0.129 26337 18548 #x0.129=(1,48,48,192)f32 prim::NumToTensor pnnx_20023 1 1 18548 18549 aten::size pnnx_20024 2 1 x0.129 18515 18550 #x0.129=(1,48,48,192)f32 prim::NumToTensor pnnx_20025 1 1 18550 C0.129 aten::Int pnnx_20026 1 1 C0.129 18552 aten::Int pnnx_20027 1 1 C0.129 18553 aten::div pnnx_20028 3 1 18547 18510 18509 18554 aten::Int pnnx_20029 1 1 18554 18555 prim::Constant pnnx_20030 0 1 26338 value=8 prim::Constant pnnx_20031 0 1 26339 value=trunc aten::div pnnx_20032 3 1 18549 26338 26339 18556 aten::Int pnnx_20033 1 1 18556 18557 prim::Constant pnnx_20034 0 1 26340 value=8 prim::ListConstruct pnnx_20035 6 1 18545 18555 18516 18557 26340 18553 18558 prim::Constant pnnx_20037 0 1 26341 value=0 prim::Constant pnnx_20038 0 1 26342 value=1 prim::Constant pnnx_20039 0 1 26343 value=3 prim::Constant pnnx_20040 0 1 26344 value=2 prim::ListConstruct pnnx_20041 6 1 26341 26342 26343 26344 18517 18518 18560 Tensor.view Tensor.view_2081 2 1 x0.129 18558 x1.129 $input=x0.129 $shape=18558 #x0.129=(1,48,48,192)f32 #x1.129=(1,6,8,6,8,192)f32 prim::Constant pnnx_20045 0 1 26346 value=8 prim::Constant pnnx_20046 0 1 26347 value=8 prim::ListConstruct pnnx_20047 4 1 18519 26346 26347 18552 18563 torch.permute torch.permute_2924 2 1 x1.129 18560 18561 $input=x1.129 $dims=18560 #x1.129=(1,6,8,6,8,192)f32 #18561=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_299 1 1 18561 18562 memory_format=torch.contiguous_format $input=18561 #18561=(1,6,6,8,8,192)f32 #18562=(1,6,6,8,8,192)f32 prim::Constant pnnx_20049 0 1 26348 value=-1 prim::ListConstruct pnnx_20050 3 1 26348 18520 18535 18565 prim::Constant pnnx_20052 0 1 18567 value=1.767767e-01 prim::Constant pnnx_20053 0 1 18568 value=trunc prim::Constant pnnx_20054 0 1 18569 value=6 prim::Constant pnnx_20055 0 1 18570 value=0 prim::Constant pnnx_20056 0 1 18571 value=1 prim::Constant pnnx_20057 0 1 18572 value=2 prim::Constant pnnx_20058 0 1 18573 value=3 prim::Constant pnnx_20059 0 1 18574 value=6 prim::Constant pnnx_20060 0 1 18575 value=4 prim::Constant pnnx_20061 0 1 18576 value=-2 prim::Constant pnnx_20062 0 1 18577 value=-1 prim::Constant pnnx_20063 0 1 18578 value=64 pnnx.Attribute layers_mmsa.4.residual_group.blocks.3.attn 0 1 relative_position_bias_table.201 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.201=(225,6)f32 pnnx.Attribute layers_mmsa.4.residual_group.blocks.3.attn 0 1 relative_position_index.201 @relative_position_index=(64,64)i64 #relative_position_index.201=(64,64)i64 Tensor.view Tensor.view_2082 2 1 18562 18563 x_windows.201 $input=18562 $shape=18563 #18562=(1,6,6,8,8,192)f32 #x_windows.201=(36,8,8,192)f32 Tensor.view Tensor.view_2083 2 1 x_windows.201 18565 x2.129 $input=x_windows.201 $shape=18565 #x_windows.201=(36,8,8,192)f32 #x2.129=(36,64,192)f32 aten::size pnnx_20064 2 1 x2.129 18570 18586 #x2.129=(36,64,192)f32 prim::NumToTensor pnnx_20065 1 1 18586 B_.201 aten::Int pnnx_20066 1 1 B_.201 18588 aten::Int pnnx_20067 1 1 B_.201 18589 aten::size pnnx_20068 2 1 x2.129 18571 18590 #x2.129=(36,64,192)f32 prim::NumToTensor pnnx_20069 1 1 18590 N.201 aten::Int pnnx_20070 1 1 N.201 18592 aten::Int pnnx_20071 1 1 N.201 18593 aten::Int pnnx_20072 1 1 N.201 18594 aten::Int pnnx_20073 1 1 N.201 18595 aten::Int pnnx_20074 1 1 N.201 18596 aten::Int pnnx_20075 1 1 N.201 18597 aten::size pnnx_20076 2 1 x2.129 18572 18598 #x2.129=(36,64,192)f32 prim::NumToTensor pnnx_20077 1 1 18598 C.409 aten::Int pnnx_20078 1 1 C.409 18600 nn.Linear layers_mmsa.4.residual_group.blocks.3.attn.qkv 1 1 
x2.129 18601 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.129=(36,64,192)f32 #18601=(36,64,576)f32 aten::div pnnx_20079 3 1 C.409 18569 18568 18602 aten::Int pnnx_20080 1 1 18602 18603 prim::ListConstruct pnnx_20081 5 1 18589 18597 18573 18574 18603 18604 prim::Constant pnnx_20083 0 1 26349 value=2 prim::Constant pnnx_20084 0 1 26350 value=0 prim::Constant pnnx_20085 0 1 26351 value=3 prim::Constant pnnx_20086 0 1 26352 value=1 prim::ListConstruct pnnx_20087 5 1 26349 26350 26351 26352 18575 18606 Tensor.reshape Tensor.reshape_632 2 1 18601 18604 18605 $input=18601 $shape=18604 #18601=(36,64,576)f32 #18605=(36,64,3,6,32)f32 prim::Constant pnnx_20089 0 1 26353 value=0 prim::Constant pnnx_20090 0 1 26354 value=0 prim::Constant pnnx_20092 0 1 26355 value=0 prim::Constant pnnx_20093 0 1 26356 value=1 prim::Constant pnnx_20095 0 1 26357 value=0 prim::Constant pnnx_20096 0 1 26358 value=2 torch.permute torch.permute_2925 2 1 18605 18606 qkv0.129 $input=18605 $dims=18606 #18605=(36,64,3,6,32)f32 #qkv0.129=(3,36,6,64,32)f32 Tensor.select Tensor.select_947 3 1 qkv0.129 26353 26354 q.201 $input=qkv0.129 $dim=26353 $index=26354 #qkv0.129=(3,36,6,64,32)f32 #q.201=(36,6,64,32)f32 aten::mul pnnx_20098 2 1 q.201 18567 q0.129 #q.201=(36,6,64,32)f32 #q0.129=(36,6,64,32)f32 Tensor.select Tensor.select_948 3 1 qkv0.129 26355 26356 k.201 $input=qkv0.129 $dim=26355 $index=26356 #qkv0.129=(3,36,6,64,32)f32 #k.201=(36,6,64,32)f32 prim::Constant pnnx_20101 0 1 26359 value=-1 prim::ListConstruct pnnx_20102 1 1 26359 18614 Tensor.view Tensor.view_2084 2 1 relative_position_index.201 18614 18615 $input=relative_position_index.201 $shape=18614 #relative_position_index.201=(64,64)i64 #18615=(4096)i64 prim::ListConstruct pnnx_20104 1 1 18615 18616 #18615=(4096)i64 prim::Constant pnnx_20106 0 1 26360 value=64 prim::Constant pnnx_20107 0 1 26361 value=-1 prim::ListConstruct pnnx_20108 3 1 18578 26360 26361 18618 Tensor.index Tensor.index_425 2 1 relative_position_bias_table.201 18616 18617 $input=relative_position_bias_table.201 $expr=18616 #relative_position_bias_table.201=(225,6)f32 #18617=(4096,6)f32 prim::Constant pnnx_20110 0 1 26362 value=2 prim::Constant pnnx_20111 0 1 26363 value=0 prim::Constant pnnx_20112 0 1 26364 value=1 prim::ListConstruct pnnx_20113 3 1 26362 26363 26364 18620 Tensor.view Tensor.view_2085 2 1 18617 18618 relative_position_bias.201 $input=18617 $shape=18618 #18617=(4096,6)f32 #relative_position_bias.201=(64,64,6)f32 prim::Constant pnnx_20117 0 1 26366 value=0 torch.permute torch.permute_2926 2 1 relative_position_bias.201 18620 18621 $input=relative_position_bias.201 $dims=18620 #relative_position_bias.201=(64,64,6)f32 #18621=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_300 1 1 18621 relative_position_bias0.129 memory_format=torch.contiguous_format $input=18621 #18621=(6,64,64)f32 #relative_position_bias0.129=(6,64,64)f32 prim::Constant pnnx_20119 0 1 26367 value=1 torch.transpose torch.transpose_3195 3 1 k.201 18576 18577 18612 $input=k.201 $dim0=18576 $dim1=18577 #k.201=(36,6,64,32)f32 #18612=(36,6,32,64)f32 torch.matmul torch.matmul_2402 2 1 q0.129 18612 attn.403 $input=q0.129 $other=18612 #q0.129=(36,6,64,32)f32 #18612=(36,6,32,64)f32 #attn.403=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3415 2 1 relative_position_bias0.129 26366 18623 $input=relative_position_bias0.129 $dim=26366 #relative_position_bias0.129=(6,64,64)f32 #18623=(1,6,64,64)f32 aten::add pnnx_20120 3 1 attn.403 18623 26367 attn0.65 #attn.403=(36,6,64,64)f32 
#18623=(1,6,64,64)f32 #attn0.65=(36,6,64,64)f32 prim::Constant pnnx_20121 0 1 26368 value=0 aten::size pnnx_20122 2 1 attn_mask.101 26368 18625 #attn_mask.101=(36,64,64)f32 prim::NumToTensor pnnx_20123 1 1 18625 other.101 aten::Int pnnx_20124 1 1 other.101 18627 prim::Constant pnnx_20125 0 1 26369 value=trunc aten::div pnnx_20126 3 1 B_.201 other.101 26369 18628 aten::Int pnnx_20127 1 1 18628 18629 prim::Constant pnnx_20128 0 1 26370 value=6 prim::ListConstruct pnnx_20129 5 1 18629 18627 26370 18596 18595 18630 prim::Constant pnnx_20131 0 1 26371 value=1 prim::Constant pnnx_20133 0 1 26372 value=0 prim::Constant pnnx_20135 0 1 26373 value=1 Tensor.view Tensor.view_2086 2 1 attn0.65 18630 18631 $input=attn0.65 $shape=18630 #attn0.65=(36,6,64,64)f32 #18631=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3416 2 1 attn_mask.101 26371 18632 $input=attn_mask.101 $dim=26371 #attn_mask.101=(36,64,64)f32 #18632=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3417 2 1 18632 26372 18633 $input=18632 $dim=26372 #18632=(36,1,64,64)f32 #18633=(1,36,1,64,64)f32 aten::add pnnx_20136 3 1 18631 18633 26373 attn1.65 #18631=(1,36,6,64,64)f32 #18633=(1,36,1,64,64)f32 #attn1.65=(1,36,6,64,64)f32 prim::Constant pnnx_20137 0 1 26374 value=-1 prim::Constant pnnx_20138 0 1 26375 value=6 prim::ListConstruct pnnx_20139 4 1 26374 26375 18594 18593 18635 Tensor.view Tensor.view_2087 2 1 attn1.65 18635 input.449 $input=attn1.65 $shape=18635 #attn1.65=(1,36,6,64,64)f32 #input.449=(36,6,64,64)f32 nn.Softmax layers_mmsa.4.residual_group.blocks.3.attn.softmax 1 1 input.449 18637 dim=-1 #input.449=(36,6,64,64)f32 #18637=(36,6,64,64)f32 nn.Dropout layers_mmsa.4.residual_group.blocks.3.attn.attn_drop 1 1 18637 18638 #18637=(36,6,64,64)f32 #18638=(36,6,64,64)f32 Tensor.select Tensor.select_949 3 1 qkv0.129 26357 26358 v.201 $input=qkv0.129 $dim=26357 $index=26358 #qkv0.129=(3,36,6,64,32)f32 #v.201=(36,6,64,32)f32 prim::Constant pnnx_20142 0 1 26376 value=1 prim::Constant pnnx_20143 0 1 26377 value=2 torch.matmul torch.matmul_2403 2 1 18638 v.201 18639 $input=18638 $other=v.201 #18638=(36,6,64,64)f32 #v.201=(36,6,64,32)f32 #18639=(36,6,64,32)f32 prim::ListConstruct pnnx_20145 3 1 18588 18592 18600 18641 torch.transpose torch.transpose_3196 3 1 18639 26376 26377 18640 $input=18639 $dim0=26376 $dim1=26377 #18639=(36,6,64,32)f32 #18640=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_633 2 1 18640 18641 input0.133 $input=18640 $shape=18641 #18640=(36,64,6,32)f32 #input0.133=(36,64,192)f32 nn.Linear layers_mmsa.4.residual_group.blocks.3.attn.proj 1 1 input0.133 18643 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.133=(36,64,192)f32 #18643=(36,64,192)f32 nn.Dropout layers_mmsa.4.residual_group.blocks.3.attn.proj_drop 1 1 18643 18644 #18643=(36,64,192)f32 #18644=(36,64,192)f32 prim::Constant pnnx_20147 0 1 26378 value=-1 prim::Constant pnnx_20148 0 1 26379 value=8 prim::Constant pnnx_20149 0 1 26380 value=8 prim::ListConstruct pnnx_20150 4 1 26378 26379 26380 18534 18645 prim::Constant pnnx_20152 0 1 26381 value=8 prim::Constant pnnx_20153 0 1 26382 value=trunc aten::div pnnx_20154 3 1 H1.1 26381 26382 18647 aten::Int pnnx_20155 1 1 18647 18648 prim::Constant pnnx_20156 0 1 26383 value=8 prim::Constant pnnx_20157 0 1 26384 value=trunc aten::div pnnx_20158 3 1 W1.1 26383 26384 18649 aten::Int pnnx_20159 1 1 18649 18650 prim::Constant pnnx_20160 0 1 26385 value=1 prim::Constant pnnx_20161 0 1 26386 value=8 prim::Constant pnnx_20162 0 1 26387 value=8 prim::Constant pnnx_20163 0 1 26388 value=-1 
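
The ops above (Tensor.view_2086, torch.unsqueeze_3416/3417, aten::add, Tensor.view_2087, nn.Softmax) apply the stored attn_mask to the raw window-attention scores of a shifted block. A minimal PyTorch sketch of that step, using the shapes from the dump (36 windows, 6 heads, 64 tokens); the function name and the random stand-in tensors are illustrative, not the exported model's source:

```python
import torch
import torch.nn.functional as F

def masked_window_softmax(attn: torch.Tensor, mask: torch.Tensor) -> torch.Tensor:
    # attn: (B*nW, heads, N, N) raw scores; mask: (nW, N, N) additive mask.
    BnW, nH, N, _ = attn.shape
    nW = mask.shape[0]
    # Line up the window axis with the mask, add, flatten back -- mirrors the
    # Tensor.view / torch.unsqueeze / aten::add / Tensor.view sequence in the dump.
    attn = attn.view(BnW // nW, nW, nH, N, N) + mask.unsqueeze(1).unsqueeze(0)
    attn = attn.view(-1, nH, N, N)
    return F.softmax(attn, dim=-1)

scores = torch.randn(36, 6, 64, 64)        # shape of attn0.65 above
attn_mask = torch.zeros(36, 64, 64)        # stands in for attn_mask.101
probs = masked_window_softmax(scores, attn_mask)   # (36, 6, 64, 64)
```
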
prim::ListConstruct pnnx_20164 6 1 26385 18648 18650 26386 26387 26388 18651 prim::Constant pnnx_20166 0 1 26389 value=0 prim::Constant pnnx_20167 0 1 26390 value=1 prim::Constant pnnx_20168 0 1 26391 value=3 prim::Constant pnnx_20169 0 1 26392 value=2 prim::Constant pnnx_20170 0 1 26393 value=4 prim::Constant pnnx_20171 0 1 26394 value=5 prim::ListConstruct pnnx_20172 6 1 26389 26390 26391 26392 26393 26394 18653 Tensor.view Tensor.view_2088 2 1 18644 18645 windows.201 $input=18644 $shape=18645 #18644=(36,64,192)f32 #windows.201=(36,8,8,192)f32 Tensor.view Tensor.view_2089 2 1 windows.201 18651 x3.129 $input=windows.201 $shape=18651 #windows.201=(36,8,8,192)f32 #x3.129=(1,6,6,8,8,192)f32 prim::Constant pnnx_20176 0 1 26396 value=1 prim::Constant pnnx_20177 0 1 26397 value=-1 prim::ListConstruct pnnx_20178 4 1 26396 1594 1834 26397 18656 torch.permute torch.permute_2927 2 1 x3.129 18653 18654 $input=x3.129 $dims=18653 #x3.129=(1,6,6,8,8,192)f32 #18654=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_301 1 1 18654 18655 memory_format=torch.contiguous_format $input=18654 #18654=(1,6,8,6,8,192)f32 #18655=(1,6,8,6,8,192)f32 prim::Constant pnnx_20180 0 1 26398 value=4 prim::Constant pnnx_20181 0 1 26399 value=4 prim::ListConstruct pnnx_20182 2 1 26398 26399 18658 prim::Constant pnnx_20183 0 1 26400 value=1 prim::Constant pnnx_20184 0 1 26401 value=2 prim::ListConstruct pnnx_20185 2 1 26400 26401 18659 Tensor.view Tensor.view_2090 2 1 18655 18656 shifted_x.101 $input=18655 $shape=18656 #18655=(1,6,8,6,8,192)f32 #shifted_x.101=(1,48,48,192)f32 aten::mul pnnx_20187 2 1 H1.1 W1.1 18661 aten::Int pnnx_20188 1 1 18661 18662 prim::ListConstruct pnnx_20189 3 1 18529 18662 18533 18663 prim::Constant pnnx_20191 0 1 18665 value=None prim::Constant pnnx_20192 0 1 26402 value=1 torch.roll torch.roll_2519 3 1 shifted_x.101 18658 18659 x4.129 $input=shifted_x.101 $shifts=18658 $dims=18659 #shifted_x.101=(1,48,48,192)f32 #x4.129=(1,48,48,192)f32 Tensor.view Tensor.view_2091 2 1 x4.129 18663 x5.101 $input=x4.129 $shape=18663 #x4.129=(1,48,48,192)f32 #x5.101=(1,2304,192)f32 aten::add pnnx_20193 3 1 18508 x5.101 26402 input.451 #18508=(1,2304,192)f32 #x5.101=(1,2304,192)f32 #input.451=(1,2304,192)f32 nn.LayerNorm layers_mmsa.4.residual_group.blocks.3.norm2 1 1 input.451 18667 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.451=(1,2304,192)f32 #18667=(1,2304,192)f32 nn.Linear layers_mmsa.4.residual_group.blocks.3.mlp.fc1 1 1 18667 18672 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #18667=(1,2304,192)f32 #18672=(1,2304,384)f32 nn.GELU layers_mmsa.4.residual_group.blocks.3.mlp.act 1 1 18672 18673 #18672=(1,2304,384)f32 #18673=(1,2304,384)f32 nn.Dropout layers_mmsa.4.residual_group.blocks.3.mlp.drop 1 1 18673 18674 #18673=(1,2304,384)f32 #18674=(1,2304,384)f32 nn.Linear layers_mmsa.4.residual_group.blocks.3.mlp.fc2 1 1 18674 18675 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #18674=(1,2304,384)f32 #18675=(1,2304,192)f32 nn.Dropout layers_mmsa.4.residual_group.blocks.3.mlp.drop 1 1 18675 18676 #18675=(1,2304,192)f32 #18676=(1,2304,192)f32 prim::Constant pnnx_20194 0 1 18677 value=None prim::Constant pnnx_20195 0 1 26403 value=1 aten::add pnnx_20196 3 1 input.451 18676 26403 18678 #input.451=(1,2304,192)f32 #18676=(1,2304,192)f32 #18678=(1,2304,192)f32 prim::Constant pnnx_20197 0 1 18679 value=trunc prim::Constant pnnx_20198 0 1 18680 value=8 prim::Constant pnnx_20199 0 1 18681 value=0 
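
This stretch of the graph (Tensor.view_2088/2089, torch.permute_2927, torch.roll_2519, the two aten::add residuals, norm2 and the fc1/GELU/fc2 chain) is the tail of a shifted window block: windows are merged back to the 48x48 map, the cyclic shift is undone, and the MLP residual is applied. A sketch under those shapes; `window_reverse`, `mlp`, `norm2` and the random tensors are stand-ins mirroring the usual Swin-style implementation, not the author's exact code:

```python
import torch
from torch import nn

def window_reverse(windows: torch.Tensor, win: int, H: int, W: int) -> torch.Tensor:
    """Inverse of window partitioning: (num_windows*B, win, win, C) -> (B, H, W, C)."""
    B = windows.shape[0] // ((H // win) * (W // win))
    x = windows.view(B, H // win, W // win, win, win, -1)
    return x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, H, W, -1)

# 192 -> 384 -> 192 MLP matching mlp.fc1 / mlp.act / mlp.drop / mlp.fc2 above.
mlp = nn.Sequential(nn.Linear(192, 384), nn.GELU(), nn.Dropout(0.0),
                    nn.Linear(384, 192), nn.Dropout(0.0))
norm2 = nn.LayerNorm(192)

attn_windows = torch.randn(36, 64, 192)                 # per-window attention output
x = window_reverse(attn_windows.view(-1, 8, 8, 192), 8, 48, 48)   # (1, 48, 48, 192)
x = torch.roll(x, shifts=(4, 4), dims=(1, 2))           # undo the cyclic shift
x = x.view(1, 48 * 48, 192)
shortcut = torch.randn(1, 2304, 192)                    # tensor carried from before norm1
x = shortcut + x                                        # first residual (aten::add)
x = x + mlp(norm2(x))                                   # second residual around the MLP
```
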
prim::Constant pnnx_20200 0 1 18682 value=2 prim::Constant pnnx_20201 0 1 18683 value=1 prim::Constant pnnx_20202 0 1 18684 value=3 prim::Constant pnnx_20203 0 1 18685 value=8 prim::Constant pnnx_20204 0 1 18686 value=4 prim::Constant pnnx_20205 0 1 18687 value=5 prim::Constant pnnx_20206 0 1 18688 value=-1 prim::Constant pnnx_20207 0 1 18689 value=64 aten::size pnnx_20208 2 1 18678 18681 18695 #18678=(1,2304,192)f32 prim::NumToTensor pnnx_20209 1 1 18695 B.241 aten::Int pnnx_20210 1 1 B.241 18697 aten::Int pnnx_20211 1 1 B.241 18698 aten::size pnnx_20212 2 1 18678 18682 18699 #18678=(1,2304,192)f32 prim::NumToTensor pnnx_20213 1 1 18699 C.411 aten::Int pnnx_20214 1 1 C.411 18701 aten::Int pnnx_20215 1 1 C.411 18702 aten::Int pnnx_20216 1 1 C.411 18703 aten::Int pnnx_20217 1 1 C.411 18704 nn.LayerNorm layers_mmsa.4.residual_group.blocks.4.norm1 1 1 18678 18705 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #18678=(1,2304,192)f32 #18705=(1,2304,192)f32 prim::ListConstruct pnnx_20218 4 1 18698 1591 1831 18704 18706 prim::Constant pnnx_20220 0 1 26404 value=0 Tensor.view Tensor.view_2092 2 1 18705 18706 x.203 $input=18705 $shape=18706 #18705=(1,2304,192)f32 #x.203=(1,48,48,192)f32 aten::size pnnx_20221 2 1 x.203 26404 18708 #x.203=(1,48,48,192)f32 prim::NumToTensor pnnx_20222 1 1 18708 B0.131 aten::Int pnnx_20223 1 1 B0.131 18710 aten::size pnnx_20224 2 1 x.203 18683 18711 #x.203=(1,48,48,192)f32 prim::NumToTensor pnnx_20225 1 1 18711 18712 prim::Constant pnnx_20226 0 1 26405 value=2 aten::size pnnx_20227 2 1 x.203 26405 18713 #x.203=(1,48,48,192)f32 prim::NumToTensor pnnx_20228 1 1 18713 18714 aten::size pnnx_20229 2 1 x.203 18684 18715 #x.203=(1,48,48,192)f32 prim::NumToTensor pnnx_20230 1 1 18715 C0.131 aten::Int pnnx_20231 1 1 C0.131 18717 aten::Int pnnx_20232 1 1 C0.131 18718 aten::div pnnx_20233 3 1 18712 18680 18679 18719 aten::Int pnnx_20234 1 1 18719 18720 prim::Constant pnnx_20235 0 1 26406 value=8 prim::Constant pnnx_20236 0 1 26407 value=trunc aten::div pnnx_20237 3 1 18714 26406 26407 18721 aten::Int pnnx_20238 1 1 18721 18722 prim::Constant pnnx_20239 0 1 26408 value=8 prim::ListConstruct pnnx_20240 6 1 18710 18720 18685 18722 26408 18718 18723 prim::Constant pnnx_20242 0 1 26409 value=0 prim::Constant pnnx_20243 0 1 26410 value=1 prim::Constant pnnx_20244 0 1 26411 value=3 prim::Constant pnnx_20245 0 1 26412 value=2 prim::ListConstruct pnnx_20246 6 1 26409 26410 26411 26412 18686 18687 18725 Tensor.view Tensor.view_2093 2 1 x.203 18723 x0.131 $input=x.203 $shape=18723 #x.203=(1,48,48,192)f32 #x0.131=(1,6,8,6,8,192)f32 prim::Constant pnnx_20250 0 1 26414 value=8 prim::Constant pnnx_20251 0 1 26415 value=8 prim::ListConstruct pnnx_20252 4 1 18688 26414 26415 18717 18728 torch.permute torch.permute_2928 2 1 x0.131 18725 18726 $input=x0.131 $dims=18725 #x0.131=(1,6,8,6,8,192)f32 #18726=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_302 1 1 18726 18727 memory_format=torch.contiguous_format $input=18726 #18726=(1,6,6,8,8,192)f32 #18727=(1,6,6,8,8,192)f32 prim::Constant pnnx_20254 0 1 26416 value=-1 prim::ListConstruct pnnx_20255 3 1 26416 18689 18703 18730 prim::Constant pnnx_20257 0 1 18732 value=1.767767e-01 prim::Constant pnnx_20258 0 1 18733 value=trunc prim::Constant pnnx_20259 0 1 18734 value=6 prim::Constant pnnx_20260 0 1 18735 value=0 prim::Constant pnnx_20261 0 1 18736 value=1 prim::Constant pnnx_20262 0 1 18737 value=2 prim::Constant pnnx_20263 0 1 18738 value=3 prim::Constant pnnx_20264 0 1 18739 value=6 
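
The view/permute/contiguous/view chain above (Tensor.view_2093, torch.permute_2928, Tensor.view_2094/2095) is the standard window partition that turns the (1,48,48,192) feature map into 36 non-overlapping 8x8 windows of 64 tokens. A small sketch of the equivalent computation; the helper name is illustrative:

```python
import torch

def window_partition(x: torch.Tensor, win: int = 8) -> torch.Tensor:
    """(B, H, W, C) -> (num_windows*B, win*win, C), as in the view/permute chain above."""
    B, H, W, C = x.shape
    x = x.view(B, H // win, win, W // win, win, C)       # (1, 6, 8, 6, 8, 192)
    x = x.permute(0, 1, 3, 2, 4, 5).contiguous()          # (1, 6, 6, 8, 8, 192)
    x = x.view(-1, win, win, C)                           # (36, 8, 8, 192)
    return x.view(-1, win * win, C)                       # (36, 64, 192)

feat = torch.randn(1, 48, 48, 192)      # shapes from the dump
windows = window_partition(feat)        # 6x6 = 36 windows of 8x8 = 64 tokens
```
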
prim::Constant pnnx_20265 0 1 18740 value=4 prim::Constant pnnx_20266 0 1 18741 value=-2 prim::Constant pnnx_20267 0 1 18742 value=-1 prim::Constant pnnx_20268 0 1 18743 value=64 pnnx.Attribute layers_mmsa.4.residual_group.blocks.4.attn 0 1 relative_position_bias_table.203 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.203=(225,6)f32 pnnx.Attribute layers_mmsa.4.residual_group.blocks.4.attn 0 1 relative_position_index.203 @relative_position_index=(64,64)i64 #relative_position_index.203=(64,64)i64 Tensor.view Tensor.view_2094 2 1 18727 18728 x_windows.203 $input=18727 $shape=18728 #18727=(1,6,6,8,8,192)f32 #x_windows.203=(36,8,8,192)f32 Tensor.view Tensor.view_2095 2 1 x_windows.203 18730 x1.131 $input=x_windows.203 $shape=18730 #x_windows.203=(36,8,8,192)f32 #x1.131=(36,64,192)f32 aten::size pnnx_20269 2 1 x1.131 18735 18751 #x1.131=(36,64,192)f32 prim::NumToTensor pnnx_20270 1 1 18751 B_.203 aten::Int pnnx_20271 1 1 B_.203 18753 aten::Int pnnx_20272 1 1 B_.203 18754 aten::size pnnx_20273 2 1 x1.131 18736 18755 #x1.131=(36,64,192)f32 prim::NumToTensor pnnx_20274 1 1 18755 N.203 aten::Int pnnx_20275 1 1 N.203 18757 aten::Int pnnx_20276 1 1 N.203 18758 aten::size pnnx_20277 2 1 x1.131 18737 18759 #x1.131=(36,64,192)f32 prim::NumToTensor pnnx_20278 1 1 18759 C.413 aten::Int pnnx_20279 1 1 C.413 18761 nn.Linear layers_mmsa.4.residual_group.blocks.4.attn.qkv 1 1 x1.131 18762 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.131=(36,64,192)f32 #18762=(36,64,576)f32 aten::div pnnx_20280 3 1 C.413 18734 18733 18763 aten::Int pnnx_20281 1 1 18763 18764 prim::ListConstruct pnnx_20282 5 1 18754 18758 18738 18739 18764 18765 prim::Constant pnnx_20284 0 1 26417 value=2 prim::Constant pnnx_20285 0 1 26418 value=0 prim::Constant pnnx_20286 0 1 26419 value=3 prim::Constant pnnx_20287 0 1 26420 value=1 prim::ListConstruct pnnx_20288 5 1 26417 26418 26419 26420 18740 18767 Tensor.reshape Tensor.reshape_634 2 1 18762 18765 18766 $input=18762 $shape=18765 #18762=(36,64,576)f32 #18766=(36,64,3,6,32)f32 prim::Constant pnnx_20290 0 1 26421 value=0 prim::Constant pnnx_20291 0 1 26422 value=0 prim::Constant pnnx_20293 0 1 26423 value=0 prim::Constant pnnx_20294 0 1 26424 value=1 prim::Constant pnnx_20296 0 1 26425 value=0 prim::Constant pnnx_20297 0 1 26426 value=2 torch.permute torch.permute_2929 2 1 18766 18767 qkv0.131 $input=18766 $dims=18767 #18766=(36,64,3,6,32)f32 #qkv0.131=(3,36,6,64,32)f32 Tensor.select Tensor.select_950 3 1 qkv0.131 26421 26422 q.203 $input=qkv0.131 $dim=26421 $index=26422 #qkv0.131=(3,36,6,64,32)f32 #q.203=(36,6,64,32)f32 aten::mul pnnx_20299 2 1 q.203 18732 q0.131 #q.203=(36,6,64,32)f32 #q0.131=(36,6,64,32)f32 Tensor.select Tensor.select_951 3 1 qkv0.131 26423 26424 k.203 $input=qkv0.131 $dim=26423 $index=26424 #qkv0.131=(3,36,6,64,32)f32 #k.203=(36,6,64,32)f32 prim::Constant pnnx_20302 0 1 26427 value=-1 prim::ListConstruct pnnx_20303 1 1 26427 18775 Tensor.view Tensor.view_2096 2 1 relative_position_index.203 18775 18776 $input=relative_position_index.203 $shape=18775 #relative_position_index.203=(64,64)i64 #18776=(4096)i64 prim::ListConstruct pnnx_20305 1 1 18776 18777 #18776=(4096)i64 prim::Constant pnnx_20307 0 1 26428 value=64 prim::Constant pnnx_20308 0 1 26429 value=-1 prim::ListConstruct pnnx_20309 3 1 18743 26428 26429 18779 Tensor.index Tensor.index_426 2 1 relative_position_bias_table.203 18777 18778 $input=relative_position_bias_table.203 $expr=18777 #relative_position_bias_table.203=(225,6)f32 #18778=(4096,6)f32 
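
The Tensor.index op just above gathers the learned relative position bias: the (225,6) table (225 = (2*8-1)^2 relative offsets, 6 heads) is indexed by the flattened (64,64) relative_position_index and reshaped into a per-head (64,64) bias that is added to the scores. A sketch with random stand-ins for the two stored attributes:

```python
import torch

table = torch.randn(225, 6)                   # stands in for relative_position_bias_table
index = torch.randint(0, 225, (64, 64))       # stands in for relative_position_index (i64)

bias = table[index.view(-1)]                  # (4096, 6)  -- the Tensor.index op
bias = bias.view(64, 64, -1)                  # (64, 64, 6)
bias = bias.permute(2, 0, 1).contiguous()     # (6, 64, 64)

attn = torch.randn(36, 6, 64, 64)             # raw q @ k^T scores
attn = attn + bias.unsqueeze(0)               # broadcast the bias over the 36 windows
```
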
prim::Constant pnnx_20311 0 1 26430 value=2 prim::Constant pnnx_20312 0 1 26431 value=0 prim::Constant pnnx_20313 0 1 26432 value=1 prim::ListConstruct pnnx_20314 3 1 26430 26431 26432 18781 Tensor.view Tensor.view_2097 2 1 18778 18779 relative_position_bias.203 $input=18778 $shape=18779 #18778=(4096,6)f32 #relative_position_bias.203=(64,64,6)f32 prim::Constant pnnx_20318 0 1 26434 value=0 torch.permute torch.permute_2930 2 1 relative_position_bias.203 18781 18782 $input=relative_position_bias.203 $dims=18781 #relative_position_bias.203=(64,64,6)f32 #18782=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_303 1 1 18782 relative_position_bias0.131 memory_format=torch.contiguous_format $input=18782 #18782=(6,64,64)f32 #relative_position_bias0.131=(6,64,64)f32 prim::Constant pnnx_20320 0 1 26435 value=1 torch.transpose torch.transpose_3197 3 1 k.203 18741 18742 18773 $input=k.203 $dim0=18741 $dim1=18742 #k.203=(36,6,64,32)f32 #18773=(36,6,32,64)f32 torch.matmul torch.matmul_2404 2 1 q0.131 18773 attn.407 $input=q0.131 $other=18773 #q0.131=(36,6,64,32)f32 #18773=(36,6,32,64)f32 #attn.407=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3418 2 1 relative_position_bias0.131 26434 18784 $input=relative_position_bias0.131 $dim=26434 #relative_position_bias0.131=(6,64,64)f32 #18784=(1,6,64,64)f32 aten::add pnnx_20321 3 1 attn.407 18784 26435 input.453 #attn.407=(36,6,64,64)f32 #18784=(1,6,64,64)f32 #input.453=(36,6,64,64)f32 nn.Softmax layers_mmsa.4.residual_group.blocks.4.attn.softmax 1 1 input.453 18786 dim=-1 #input.453=(36,6,64,64)f32 #18786=(36,6,64,64)f32 nn.Dropout layers_mmsa.4.residual_group.blocks.4.attn.attn_drop 1 1 18786 18787 #18786=(36,6,64,64)f32 #18787=(36,6,64,64)f32 Tensor.select Tensor.select_952 3 1 qkv0.131 26425 26426 v.203 $input=qkv0.131 $dim=26425 $index=26426 #qkv0.131=(3,36,6,64,32)f32 #v.203=(36,6,64,32)f32 prim::Constant pnnx_20323 0 1 26436 value=1 prim::Constant pnnx_20324 0 1 26437 value=2 torch.matmul torch.matmul_2405 2 1 18787 v.203 18788 $input=18787 $other=v.203 #18787=(36,6,64,64)f32 #v.203=(36,6,64,32)f32 #18788=(36,6,64,32)f32 prim::ListConstruct pnnx_20326 3 1 18753 18757 18761 18790 torch.transpose torch.transpose_3198 3 1 18788 26436 26437 18789 $input=18788 $dim0=26436 $dim1=26437 #18788=(36,6,64,32)f32 #18789=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_635 2 1 18789 18790 input0.135 $input=18789 $shape=18790 #18789=(36,64,6,32)f32 #input0.135=(36,64,192)f32 nn.Linear layers_mmsa.4.residual_group.blocks.4.attn.proj 1 1 input0.135 18792 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.135=(36,64,192)f32 #18792=(36,64,192)f32 nn.Dropout layers_mmsa.4.residual_group.blocks.4.attn.proj_drop 1 1 18792 18793 #18792=(36,64,192)f32 #18793=(36,64,192)f32 prim::Constant pnnx_20328 0 1 26438 value=-1 prim::Constant pnnx_20329 0 1 26439 value=8 prim::Constant pnnx_20330 0 1 26440 value=8 prim::ListConstruct pnnx_20331 4 1 26438 26439 26440 18702 18794 prim::Constant pnnx_20333 0 1 26441 value=8 prim::Constant pnnx_20334 0 1 26442 value=trunc aten::div pnnx_20335 3 1 H1.1 26441 26442 18796 aten::Int pnnx_20336 1 1 18796 18797 prim::Constant pnnx_20337 0 1 26443 value=8 prim::Constant pnnx_20338 0 1 26444 value=trunc aten::div pnnx_20339 3 1 W1.1 26443 26444 18798 aten::Int pnnx_20340 1 1 18798 18799 prim::Constant pnnx_20341 0 1 26445 value=1 prim::Constant pnnx_20342 0 1 26446 value=8 prim::Constant pnnx_20343 0 1 26447 value=8 prim::Constant pnnx_20344 0 1 26448 value=-1 prim::ListConstruct pnnx_20345 6 1 26445 18797 18799 
26446 26447 26448 18800 prim::Constant pnnx_20347 0 1 26449 value=0 prim::Constant pnnx_20348 0 1 26450 value=1 prim::Constant pnnx_20349 0 1 26451 value=3 prim::Constant pnnx_20350 0 1 26452 value=2 prim::Constant pnnx_20351 0 1 26453 value=4 prim::Constant pnnx_20352 0 1 26454 value=5 prim::ListConstruct pnnx_20353 6 1 26449 26450 26451 26452 26453 26454 18802 Tensor.view Tensor.view_2098 2 1 18793 18794 windows.203 $input=18793 $shape=18794 #18793=(36,64,192)f32 #windows.203=(36,8,8,192)f32 Tensor.view Tensor.view_2099 2 1 windows.203 18800 x2.131 $input=windows.203 $shape=18800 #windows.203=(36,8,8,192)f32 #x2.131=(1,6,6,8,8,192)f32 prim::Constant pnnx_20357 0 1 26456 value=1 prim::Constant pnnx_20358 0 1 26457 value=-1 prim::ListConstruct pnnx_20359 4 1 26456 1588 1828 26457 18805 torch.permute torch.permute_2931 2 1 x2.131 18802 18803 $input=x2.131 $dims=18802 #x2.131=(1,6,6,8,8,192)f32 #18803=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_304 1 1 18803 18804 memory_format=torch.contiguous_format $input=18803 #18803=(1,6,8,6,8,192)f32 #18804=(1,6,8,6,8,192)f32 aten::mul pnnx_20361 2 1 H1.1 W1.1 18807 aten::Int pnnx_20362 1 1 18807 18808 prim::ListConstruct pnnx_20363 3 1 18697 18808 18701 18809 prim::Constant pnnx_20365 0 1 18811 value=None prim::Constant pnnx_20366 0 1 26458 value=1 Tensor.view Tensor.view_2100 2 1 18804 18805 x3.131 $input=18804 $shape=18805 #18804=(1,6,8,6,8,192)f32 #x3.131=(1,48,48,192)f32 Tensor.view Tensor.view_2101 2 1 x3.131 18809 x4.131 $input=x3.131 $shape=18809 #x3.131=(1,48,48,192)f32 #x4.131=(1,2304,192)f32 aten::add pnnx_20367 3 1 18678 x4.131 26458 input.455 #18678=(1,2304,192)f32 #x4.131=(1,2304,192)f32 #input.455=(1,2304,192)f32 nn.LayerNorm layers_mmsa.4.residual_group.blocks.4.norm2 1 1 input.455 18813 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.455=(1,2304,192)f32 #18813=(1,2304,192)f32 nn.Linear layers_mmsa.4.residual_group.blocks.4.mlp.fc1 1 1 18813 18818 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #18813=(1,2304,192)f32 #18818=(1,2304,384)f32 nn.GELU layers_mmsa.4.residual_group.blocks.4.mlp.act 1 1 18818 18819 #18818=(1,2304,384)f32 #18819=(1,2304,384)f32 nn.Dropout layers_mmsa.4.residual_group.blocks.4.mlp.drop 1 1 18819 18820 #18819=(1,2304,384)f32 #18820=(1,2304,384)f32 nn.Linear layers_mmsa.4.residual_group.blocks.4.mlp.fc2 1 1 18820 18821 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #18820=(1,2304,384)f32 #18821=(1,2304,192)f32 nn.Dropout layers_mmsa.4.residual_group.blocks.4.mlp.drop 1 1 18821 18822 #18821=(1,2304,192)f32 #18822=(1,2304,192)f32 prim::Constant pnnx_20368 0 1 18823 value=None prim::Constant pnnx_20369 0 1 26459 value=1 aten::add pnnx_20370 3 1 input.455 18822 26459 18824 #input.455=(1,2304,192)f32 #18822=(1,2304,192)f32 #18824=(1,2304,192)f32 prim::Constant pnnx_20371 0 1 18825 value=trunc prim::Constant pnnx_20372 0 1 18826 value=8 prim::Constant pnnx_20373 0 1 18827 value=0 prim::Constant pnnx_20374 0 1 18828 value=2 prim::Constant pnnx_20375 0 1 18829 value=-4 prim::Constant pnnx_20376 0 1 18830 value=1 prim::Constant pnnx_20377 0 1 18831 value=3 prim::Constant pnnx_20378 0 1 18832 value=8 prim::Constant pnnx_20379 0 1 18833 value=4 prim::Constant pnnx_20380 0 1 18834 value=5 prim::Constant pnnx_20381 0 1 18835 value=-1 prim::Constant pnnx_20382 0 1 18836 value=64 pnnx.Attribute layers_mmsa.4.residual_group.blocks.5 0 1 attn_mask.103 @attn_mask=(36,64,64)f32 #attn_mask.103=(36,64,64)f32 
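
The attention front end repeated in each of these blocks is the qkv projection and head split: a 192->576 Linear, reshape to (windows, tokens, 3, heads, head_dim), permute so q/k/v can be selected, scale q by head_dim**-0.5 (the 1.767767e-01 constant, i.e. 1/sqrt(32)), and form the raw scores. A minimal sketch with the shapes from the dump; the module and tensor names are illustrative:

```python
import torch
from torch import nn

heads, dim = 6, 192
head_dim = dim // heads                      # 32
scale = head_dim ** -0.5                     # 0.17677..., the constant in the dump

qkv_proj = nn.Linear(dim, dim * 3, bias=True)    # 192 -> 576, like attn.qkv

x = torch.randn(36, 64, dim)                 # (windows, tokens, channels)
B_, N, C = x.shape
qkv = qkv_proj(x).reshape(B_, N, 3, heads, head_dim).permute(2, 0, 3, 1, 4)
q, k, v = qkv[0], qkv[1], qkv[2]             # each (36, 6, 64, 32)
attn = (q * scale) @ k.transpose(-2, -1)     # (36, 6, 64, 64) raw scores
```
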
aten::size pnnx_20383 2 1 18824 18827 18843 #18824=(1,2304,192)f32 prim::NumToTensor pnnx_20384 1 1 18843 B.243 aten::Int pnnx_20385 1 1 B.243 18845 aten::Int pnnx_20386 1 1 B.243 18846 aten::size pnnx_20387 2 1 18824 18828 18847 #18824=(1,2304,192)f32 prim::NumToTensor pnnx_20388 1 1 18847 C.415 aten::Int pnnx_20389 1 1 C.415 18849 aten::Int pnnx_20390 1 1 C.415 18850 aten::Int pnnx_20391 1 1 C.415 18851 aten::Int pnnx_20392 1 1 C.415 18852 nn.LayerNorm layers_mmsa.4.residual_group.blocks.5.norm1 1 1 18824 18853 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #18824=(1,2304,192)f32 #18853=(1,2304,192)f32 prim::ListConstruct pnnx_20393 4 1 18846 1585 1825 18852 18854 prim::Constant pnnx_20395 0 1 26460 value=-4 prim::ListConstruct pnnx_20396 2 1 18829 26460 18856 prim::Constant pnnx_20397 0 1 26461 value=2 prim::ListConstruct pnnx_20398 2 1 18830 26461 18857 Tensor.view Tensor.view_2102 2 1 18853 18854 x.205 $input=18853 $shape=18854 #18853=(1,2304,192)f32 #x.205=(1,48,48,192)f32 prim::Constant pnnx_20400 0 1 26462 value=0 torch.roll torch.roll_2520 3 1 x.205 18856 18857 x0.133 $input=x.205 $shifts=18856 $dims=18857 #x.205=(1,48,48,192)f32 #x0.133=(1,48,48,192)f32 aten::size pnnx_20401 2 1 x0.133 26462 18859 #x0.133=(1,48,48,192)f32 prim::NumToTensor pnnx_20402 1 1 18859 B0.133 aten::Int pnnx_20403 1 1 B0.133 18861 prim::Constant pnnx_20404 0 1 26463 value=1 aten::size pnnx_20405 2 1 x0.133 26463 18862 #x0.133=(1,48,48,192)f32 prim::NumToTensor pnnx_20406 1 1 18862 18863 prim::Constant pnnx_20407 0 1 26464 value=2 aten::size pnnx_20408 2 1 x0.133 26464 18864 #x0.133=(1,48,48,192)f32 prim::NumToTensor pnnx_20409 1 1 18864 18865 aten::size pnnx_20410 2 1 x0.133 18831 18866 #x0.133=(1,48,48,192)f32 prim::NumToTensor pnnx_20411 1 1 18866 C0.133 aten::Int pnnx_20412 1 1 C0.133 18868 aten::Int pnnx_20413 1 1 C0.133 18869 aten::div pnnx_20414 3 1 18863 18826 18825 18870 aten::Int pnnx_20415 1 1 18870 18871 prim::Constant pnnx_20416 0 1 26465 value=8 prim::Constant pnnx_20417 0 1 26466 value=trunc aten::div pnnx_20418 3 1 18865 26465 26466 18872 aten::Int pnnx_20419 1 1 18872 18873 prim::Constant pnnx_20420 0 1 26467 value=8 prim::ListConstruct pnnx_20421 6 1 18861 18871 18832 18873 26467 18869 18874 prim::Constant pnnx_20423 0 1 26468 value=0 prim::Constant pnnx_20424 0 1 26469 value=1 prim::Constant pnnx_20425 0 1 26470 value=3 prim::Constant pnnx_20426 0 1 26471 value=2 prim::ListConstruct pnnx_20427 6 1 26468 26469 26470 26471 18833 18834 18876 Tensor.view Tensor.view_2103 2 1 x0.133 18874 x1.133 $input=x0.133 $shape=18874 #x0.133=(1,48,48,192)f32 #x1.133=(1,6,8,6,8,192)f32 prim::Constant pnnx_20431 0 1 26473 value=8 prim::Constant pnnx_20432 0 1 26474 value=8 prim::ListConstruct pnnx_20433 4 1 18835 26473 26474 18868 18879 torch.permute torch.permute_2932 2 1 x1.133 18876 18877 $input=x1.133 $dims=18876 #x1.133=(1,6,8,6,8,192)f32 #18877=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_305 1 1 18877 18878 memory_format=torch.contiguous_format $input=18877 #18877=(1,6,6,8,8,192)f32 #18878=(1,6,6,8,8,192)f32 prim::Constant pnnx_20435 0 1 26475 value=-1 prim::ListConstruct pnnx_20436 3 1 26475 18836 18851 18881 prim::Constant pnnx_20438 0 1 18883 value=1.767767e-01 prim::Constant pnnx_20439 0 1 18884 value=trunc prim::Constant pnnx_20440 0 1 18885 value=6 prim::Constant pnnx_20441 0 1 18886 value=0 prim::Constant pnnx_20442 0 1 18887 value=1 prim::Constant pnnx_20443 0 1 18888 value=2 prim::Constant pnnx_20444 0 1 18889 value=3 prim::Constant 
pnnx_20445 0 1 18890 value=6 prim::Constant pnnx_20446 0 1 18891 value=4 prim::Constant pnnx_20447 0 1 18892 value=-2 prim::Constant pnnx_20448 0 1 18893 value=-1 prim::Constant pnnx_20449 0 1 18894 value=64 pnnx.Attribute layers_mmsa.4.residual_group.blocks.5.attn 0 1 relative_position_bias_table.205 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.205=(225,6)f32 pnnx.Attribute layers_mmsa.4.residual_group.blocks.5.attn 0 1 relative_position_index.205 @relative_position_index=(64,64)i64 #relative_position_index.205=(64,64)i64 Tensor.view Tensor.view_2104 2 1 18878 18879 x_windows.205 $input=18878 $shape=18879 #18878=(1,6,6,8,8,192)f32 #x_windows.205=(36,8,8,192)f32 Tensor.view Tensor.view_2105 2 1 x_windows.205 18881 x2.133 $input=x_windows.205 $shape=18881 #x_windows.205=(36,8,8,192)f32 #x2.133=(36,64,192)f32 aten::size pnnx_20450 2 1 x2.133 18886 18902 #x2.133=(36,64,192)f32 prim::NumToTensor pnnx_20451 1 1 18902 B_.205 aten::Int pnnx_20452 1 1 B_.205 18904 aten::Int pnnx_20453 1 1 B_.205 18905 aten::size pnnx_20454 2 1 x2.133 18887 18906 #x2.133=(36,64,192)f32 prim::NumToTensor pnnx_20455 1 1 18906 N.205 aten::Int pnnx_20456 1 1 N.205 18908 aten::Int pnnx_20457 1 1 N.205 18909 aten::Int pnnx_20458 1 1 N.205 18910 aten::Int pnnx_20459 1 1 N.205 18911 aten::Int pnnx_20460 1 1 N.205 18912 aten::Int pnnx_20461 1 1 N.205 18913 aten::size pnnx_20462 2 1 x2.133 18888 18914 #x2.133=(36,64,192)f32 prim::NumToTensor pnnx_20463 1 1 18914 C.417 aten::Int pnnx_20464 1 1 C.417 18916 nn.Linear layers_mmsa.4.residual_group.blocks.5.attn.qkv 1 1 x2.133 18917 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.133=(36,64,192)f32 #18917=(36,64,576)f32 aten::div pnnx_20465 3 1 C.417 18885 18884 18918 aten::Int pnnx_20466 1 1 18918 18919 prim::ListConstruct pnnx_20467 5 1 18905 18913 18889 18890 18919 18920 prim::Constant pnnx_20469 0 1 26476 value=2 prim::Constant pnnx_20470 0 1 26477 value=0 prim::Constant pnnx_20471 0 1 26478 value=3 prim::Constant pnnx_20472 0 1 26479 value=1 prim::ListConstruct pnnx_20473 5 1 26476 26477 26478 26479 18891 18922 Tensor.reshape Tensor.reshape_636 2 1 18917 18920 18921 $input=18917 $shape=18920 #18917=(36,64,576)f32 #18921=(36,64,3,6,32)f32 prim::Constant pnnx_20475 0 1 26480 value=0 prim::Constant pnnx_20476 0 1 26481 value=0 prim::Constant pnnx_20478 0 1 26482 value=0 prim::Constant pnnx_20479 0 1 26483 value=1 prim::Constant pnnx_20481 0 1 26484 value=0 prim::Constant pnnx_20482 0 1 26485 value=2 torch.permute torch.permute_2933 2 1 18921 18922 qkv0.133 $input=18921 $dims=18922 #18921=(36,64,3,6,32)f32 #qkv0.133=(3,36,6,64,32)f32 Tensor.select Tensor.select_953 3 1 qkv0.133 26480 26481 q.205 $input=qkv0.133 $dim=26480 $index=26481 #qkv0.133=(3,36,6,64,32)f32 #q.205=(36,6,64,32)f32 aten::mul pnnx_20484 2 1 q.205 18883 q0.133 #q.205=(36,6,64,32)f32 #q0.133=(36,6,64,32)f32 Tensor.select Tensor.select_954 3 1 qkv0.133 26482 26483 k.205 $input=qkv0.133 $dim=26482 $index=26483 #qkv0.133=(3,36,6,64,32)f32 #k.205=(36,6,64,32)f32 prim::Constant pnnx_20487 0 1 26486 value=-1 prim::ListConstruct pnnx_20488 1 1 26486 18930 Tensor.view Tensor.view_2106 2 1 relative_position_index.205 18930 18931 $input=relative_position_index.205 $shape=18930 #relative_position_index.205=(64,64)i64 #18931=(4096)i64 prim::ListConstruct pnnx_20490 1 1 18931 18932 #18931=(4096)i64 prim::Constant pnnx_20492 0 1 26487 value=64 prim::Constant pnnx_20493 0 1 26488 value=-1 prim::ListConstruct pnnx_20494 3 1 18894 26487 26488 18934 Tensor.index 
Tensor.index_427 2 1 relative_position_bias_table.205 18932 18933 $input=relative_position_bias_table.205 $expr=18932 #relative_position_bias_table.205=(225,6)f32 #18933=(4096,6)f32 prim::Constant pnnx_20496 0 1 26489 value=2 prim::Constant pnnx_20497 0 1 26490 value=0 prim::Constant pnnx_20498 0 1 26491 value=1 prim::ListConstruct pnnx_20499 3 1 26489 26490 26491 18936 Tensor.view Tensor.view_2107 2 1 18933 18934 relative_position_bias.205 $input=18933 $shape=18934 #18933=(4096,6)f32 #relative_position_bias.205=(64,64,6)f32 prim::Constant pnnx_20503 0 1 26493 value=0 torch.permute torch.permute_2934 2 1 relative_position_bias.205 18936 18937 $input=relative_position_bias.205 $dims=18936 #relative_position_bias.205=(64,64,6)f32 #18937=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_306 1 1 18937 relative_position_bias0.133 memory_format=torch.contiguous_format $input=18937 #18937=(6,64,64)f32 #relative_position_bias0.133=(6,64,64)f32 prim::Constant pnnx_20505 0 1 26494 value=1 torch.transpose torch.transpose_3199 3 1 k.205 18892 18893 18928 $input=k.205 $dim0=18892 $dim1=18893 #k.205=(36,6,64,32)f32 #18928=(36,6,32,64)f32 torch.matmul torch.matmul_2406 2 1 q0.133 18928 attn.411 $input=q0.133 $other=18928 #q0.133=(36,6,64,32)f32 #18928=(36,6,32,64)f32 #attn.411=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3419 2 1 relative_position_bias0.133 26493 18939 $input=relative_position_bias0.133 $dim=26493 #relative_position_bias0.133=(6,64,64)f32 #18939=(1,6,64,64)f32 aten::add pnnx_20506 3 1 attn.411 18939 26494 attn0.67 #attn.411=(36,6,64,64)f32 #18939=(1,6,64,64)f32 #attn0.67=(36,6,64,64)f32 prim::Constant pnnx_20507 0 1 26495 value=0 aten::size pnnx_20508 2 1 attn_mask.103 26495 18941 #attn_mask.103=(36,64,64)f32 prim::NumToTensor pnnx_20509 1 1 18941 other.103 aten::Int pnnx_20510 1 1 other.103 18943 prim::Constant pnnx_20511 0 1 26496 value=trunc aten::div pnnx_20512 3 1 B_.205 other.103 26496 18944 aten::Int pnnx_20513 1 1 18944 18945 prim::Constant pnnx_20514 0 1 26497 value=6 prim::ListConstruct pnnx_20515 5 1 18945 18943 26497 18912 18911 18946 prim::Constant pnnx_20517 0 1 26498 value=1 prim::Constant pnnx_20519 0 1 26499 value=0 prim::Constant pnnx_20521 0 1 26500 value=1 Tensor.view Tensor.view_2108 2 1 attn0.67 18946 18947 $input=attn0.67 $shape=18946 #attn0.67=(36,6,64,64)f32 #18947=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3420 2 1 attn_mask.103 26498 18948 $input=attn_mask.103 $dim=26498 #attn_mask.103=(36,64,64)f32 #18948=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3421 2 1 18948 26499 18949 $input=18948 $dim=26499 #18948=(36,1,64,64)f32 #18949=(1,36,1,64,64)f32 aten::add pnnx_20522 3 1 18947 18949 26500 attn1.67 #18947=(1,36,6,64,64)f32 #18949=(1,36,1,64,64)f32 #attn1.67=(1,36,6,64,64)f32 prim::Constant pnnx_20523 0 1 26501 value=-1 prim::Constant pnnx_20524 0 1 26502 value=6 prim::ListConstruct pnnx_20525 4 1 26501 26502 18910 18909 18951 Tensor.view Tensor.view_2109 2 1 attn1.67 18951 input.457 $input=attn1.67 $shape=18951 #attn1.67=(1,36,6,64,64)f32 #input.457=(36,6,64,64)f32 nn.Softmax layers_mmsa.4.residual_group.blocks.5.attn.softmax 1 1 input.457 18953 dim=-1 #input.457=(36,6,64,64)f32 #18953=(36,6,64,64)f32 nn.Dropout layers_mmsa.4.residual_group.blocks.5.attn.attn_drop 1 1 18953 18954 #18953=(36,6,64,64)f32 #18954=(36,6,64,64)f32 Tensor.select Tensor.select_955 3 1 qkv0.133 26484 26485 v.205 $input=qkv0.133 $dim=26484 $index=26485 #qkv0.133=(3,36,6,64,32)f32 #v.205=(36,6,64,32)f32 prim::Constant pnnx_20528 0 1 26503 value=1 prim::Constant pnnx_20529 0 1 
26504 value=2 torch.matmul torch.matmul_2407 2 1 18954 v.205 18955 $input=18954 $other=v.205 #18954=(36,6,64,64)f32 #v.205=(36,6,64,32)f32 #18955=(36,6,64,32)f32 prim::ListConstruct pnnx_20531 3 1 18904 18908 18916 18957 torch.transpose torch.transpose_3200 3 1 18955 26503 26504 18956 $input=18955 $dim0=26503 $dim1=26504 #18955=(36,6,64,32)f32 #18956=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_637 2 1 18956 18957 input0.137 $input=18956 $shape=18957 #18956=(36,64,6,32)f32 #input0.137=(36,64,192)f32 nn.Linear layers_mmsa.4.residual_group.blocks.5.attn.proj 1 1 input0.137 18959 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.137=(36,64,192)f32 #18959=(36,64,192)f32 nn.Dropout layers_mmsa.4.residual_group.blocks.5.attn.proj_drop 1 1 18959 18960 #18959=(36,64,192)f32 #18960=(36,64,192)f32 prim::Constant pnnx_20533 0 1 26505 value=-1 prim::Constant pnnx_20534 0 1 26506 value=8 prim::Constant pnnx_20535 0 1 26507 value=8 prim::ListConstruct pnnx_20536 4 1 26505 26506 26507 18850 18961 prim::Constant pnnx_20538 0 1 26508 value=8 prim::Constant pnnx_20539 0 1 26509 value=trunc aten::div pnnx_20540 3 1 H1.1 26508 26509 18963 aten::Int pnnx_20541 1 1 18963 18964 prim::Constant pnnx_20542 0 1 26510 value=8 prim::Constant pnnx_20543 0 1 26511 value=trunc aten::div pnnx_20544 3 1 W1.1 26510 26511 18965 aten::Int pnnx_20545 1 1 18965 18966 prim::Constant pnnx_20546 0 1 26512 value=1 prim::Constant pnnx_20547 0 1 26513 value=8 prim::Constant pnnx_20548 0 1 26514 value=8 prim::Constant pnnx_20549 0 1 26515 value=-1 prim::ListConstruct pnnx_20550 6 1 26512 18964 18966 26513 26514 26515 18967 prim::Constant pnnx_20552 0 1 26516 value=0 prim::Constant pnnx_20553 0 1 26517 value=1 prim::Constant pnnx_20554 0 1 26518 value=3 prim::Constant pnnx_20555 0 1 26519 value=2 prim::Constant pnnx_20556 0 1 26520 value=4 prim::Constant pnnx_20557 0 1 26521 value=5 prim::ListConstruct pnnx_20558 6 1 26516 26517 26518 26519 26520 26521 18969 Tensor.view Tensor.view_2110 2 1 18960 18961 windows.205 $input=18960 $shape=18961 #18960=(36,64,192)f32 #windows.205=(36,8,8,192)f32 Tensor.view Tensor.view_2111 2 1 windows.205 18967 x3.133 $input=windows.205 $shape=18967 #windows.205=(36,8,8,192)f32 #x3.133=(1,6,6,8,8,192)f32 prim::Constant pnnx_20562 0 1 26523 value=1 prim::Constant pnnx_20563 0 1 26524 value=-1 prim::ListConstruct pnnx_20564 4 1 26523 1582 1822 26524 18972 torch.permute torch.permute_2935 2 1 x3.133 18969 18970 $input=x3.133 $dims=18969 #x3.133=(1,6,6,8,8,192)f32 #18970=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_307 1 1 18970 18971 memory_format=torch.contiguous_format $input=18970 #18970=(1,6,8,6,8,192)f32 #18971=(1,6,8,6,8,192)f32 prim::Constant pnnx_20566 0 1 26525 value=4 prim::Constant pnnx_20567 0 1 26526 value=4 prim::ListConstruct pnnx_20568 2 1 26525 26526 18974 prim::Constant pnnx_20569 0 1 26527 value=1 prim::Constant pnnx_20570 0 1 26528 value=2 prim::ListConstruct pnnx_20571 2 1 26527 26528 18975 Tensor.view Tensor.view_2112 2 1 18971 18972 shifted_x.103 $input=18971 $shape=18972 #18971=(1,6,8,6,8,192)f32 #shifted_x.103=(1,48,48,192)f32 aten::mul pnnx_20573 2 1 H1.1 W1.1 18977 aten::Int pnnx_20574 1 1 18977 18978 prim::ListConstruct pnnx_20575 3 1 18845 18978 18849 18979 prim::Constant pnnx_20577 0 1 18981 value=None prim::Constant pnnx_20578 0 1 26529 value=1 torch.roll torch.roll_2521 3 1 shifted_x.103 18974 18975 x4.133 $input=shifted_x.103 $shifts=18974 $dims=18975 #shifted_x.103=(1,48,48,192)f32 #x4.133=(1,48,48,192)f32 Tensor.view 
Tensor.view_2113 2 1 x4.133 18979 x5.103 $input=x4.133 $shape=18979 #x4.133=(1,48,48,192)f32 #x5.103=(1,2304,192)f32 aten::add pnnx_20579 3 1 18824 x5.103 26529 input.459 #18824=(1,2304,192)f32 #x5.103=(1,2304,192)f32 #input.459=(1,2304,192)f32 nn.LayerNorm layers_mmsa.4.residual_group.blocks.5.norm2 1 1 input.459 18983 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.459=(1,2304,192)f32 #18983=(1,2304,192)f32 nn.Linear layers_mmsa.4.residual_group.blocks.5.mlp.fc1 1 1 18983 18988 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #18983=(1,2304,192)f32 #18988=(1,2304,384)f32 nn.GELU layers_mmsa.4.residual_group.blocks.5.mlp.act 1 1 18988 18989 #18988=(1,2304,384)f32 #18989=(1,2304,384)f32 nn.Dropout layers_mmsa.4.residual_group.blocks.5.mlp.drop 1 1 18989 18990 #18989=(1,2304,384)f32 #18990=(1,2304,384)f32 nn.Linear layers_mmsa.4.residual_group.blocks.5.mlp.fc2 1 1 18990 18991 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #18990=(1,2304,384)f32 #18991=(1,2304,192)f32 nn.Dropout layers_mmsa.4.residual_group.blocks.5.mlp.drop 1 1 18991 18992 #18991=(1,2304,192)f32 #18992=(1,2304,192)f32 prim::Constant pnnx_20580 0 1 18993 value=None prim::Constant pnnx_20581 0 1 26530 value=1 aten::add pnnx_20582 3 1 input.459 18992 26530 18994 #input.459=(1,2304,192)f32 #18992=(1,2304,192)f32 #18994=(1,2304,192)f32 prim::Constant pnnx_20583 0 1 18995 value=0 prim::Constant pnnx_20584 0 1 18996 value=1 prim::Constant pnnx_20585 0 1 18997 value=2 prim::Constant pnnx_20586 0 1 18998 value=192 aten::size pnnx_20587 2 1 18994 18995 18999 #18994=(1,2304,192)f32 prim::NumToTensor pnnx_20588 1 1 18999 B.245 aten::Int pnnx_20589 1 1 B.245 19001 prim::ListConstruct pnnx_20591 4 1 19001 18998 1579 1819 19003 torch.transpose torch.transpose_3201 3 1 18994 18996 18997 19002 $input=18994 $dim0=18996 $dim1=18997 #18994=(1,2304,192)f32 #19002=(1,192,2304)f32 Tensor.view Tensor.view_2114 2 1 19002 19003 input.461 $input=19002 $shape=19003 #19002=(1,192,2304)f32 #input.461=(1,192,48,48)f32 nn.Conv2d layers_mmsa.4.conv 1 1 input.461 19005 bias=True dilation=(1,1) groups=1 in_channels=192 kernel_size=(3,3) out_channels=192 padding=(1,1) padding_mode=zeros stride=(1,1) @bias=(192)f32 @weight=(192,192,3,3)f32 #input.461=(1,192,48,48)f32 #19005=(1,192,48,48)f32 prim::Constant pnnx_20593 0 1 19006 value=-1 prim::Constant pnnx_20594 0 1 19007 value=2 prim::Constant pnnx_20595 0 1 19008 value=1 prim::Constant pnnx_20597 0 1 26531 value=2 torch.flatten torch.flatten_2202 3 1 19005 19007 19006 19009 $input=19005 $start_dim=19007 $end_dim=19006 #19005=(1,192,48,48)f32 #19009=(1,192,2304)f32 torch.transpose torch.transpose_3202 3 1 19009 19008 26531 19010 $input=19009 $dim0=19008 $dim1=26531 #19009=(1,192,2304)f32 #19010=(1,2304,192)f32 aten::add pnnx_20599 3 1 19010 18029 18030 19011 #19010=(1,2304,192)f32 #18029=(1,2304,192)f32 #19011=(1,2304,192)f32 prim::Constant pnnx_20600 0 1 19012 value=1 prim::Constant pnnx_20601 0 1 19029 value=trunc prim::Constant pnnx_20602 0 1 19030 value=8 prim::Constant pnnx_20603 0 1 19031 value=0 prim::Constant pnnx_20604 0 1 19032 value=2 prim::Constant pnnx_20605 0 1 19033 value=1 prim::Constant pnnx_20606 0 1 19034 value=3 prim::Constant pnnx_20607 0 1 19035 value=8 prim::Constant pnnx_20608 0 1 19036 value=4 prim::Constant pnnx_20609 0 1 19037 value=5 prim::Constant pnnx_20610 0 1 19038 value=-1 prim::Constant pnnx_20611 0 1 19039 value=64 aten::size pnnx_20612 2 1 19011 19031 19045 
#19011=(1,2304,192)f32 prim::NumToTensor pnnx_20613 1 1 19045 B.2 aten::Int pnnx_20614 1 1 B.2 19047 aten::Int pnnx_20615 1 1 B.2 19048 aten::size pnnx_20616 2 1 19011 19032 19049 #19011=(1,2304,192)f32 prim::NumToTensor pnnx_20617 1 1 19049 C.10 aten::Int pnnx_20618 1 1 C.10 19051 aten::Int pnnx_20619 1 1 C.10 19052 aten::Int pnnx_20620 1 1 C.10 19053 aten::Int pnnx_20621 1 1 C.10 19054 nn.LayerNorm layers_mmsa.5.residual_group.blocks.0.norm1 1 1 19011 19055 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #19011=(1,2304,192)f32 #19055=(1,2304,192)f32 prim::ListConstruct pnnx_20622 4 1 19048 1576 1816 19054 19056 prim::Constant pnnx_20624 0 1 26532 value=0 Tensor.view Tensor.view_2115 2 1 19055 19056 x.2 $input=19055 $shape=19056 #19055=(1,2304,192)f32 #x.2=(1,48,48,192)f32 aten::size pnnx_20625 2 1 x.2 26532 19058 #x.2=(1,48,48,192)f32 prim::NumToTensor pnnx_20626 1 1 19058 B0.2 aten::Int pnnx_20627 1 1 B0.2 19060 aten::size pnnx_20628 2 1 x.2 19033 19061 #x.2=(1,48,48,192)f32 prim::NumToTensor pnnx_20629 1 1 19061 19062 prim::Constant pnnx_20630 0 1 26533 value=2 aten::size pnnx_20631 2 1 x.2 26533 19063 #x.2=(1,48,48,192)f32 prim::NumToTensor pnnx_20632 1 1 19063 19064 aten::size pnnx_20633 2 1 x.2 19034 19065 #x.2=(1,48,48,192)f32 prim::NumToTensor pnnx_20634 1 1 19065 C0.2 aten::Int pnnx_20635 1 1 C0.2 19067 aten::Int pnnx_20636 1 1 C0.2 19068 aten::div pnnx_20637 3 1 19062 19030 19029 19069 aten::Int pnnx_20638 1 1 19069 19070 prim::Constant pnnx_20639 0 1 26534 value=8 prim::Constant pnnx_20640 0 1 26535 value=trunc aten::div pnnx_20641 3 1 19064 26534 26535 19071 aten::Int pnnx_20642 1 1 19071 19072 prim::Constant pnnx_20643 0 1 26536 value=8 prim::ListConstruct pnnx_20644 6 1 19060 19070 19035 19072 26536 19068 19073 prim::Constant pnnx_20646 0 1 26537 value=0 prim::Constant pnnx_20647 0 1 26538 value=1 prim::Constant pnnx_20648 0 1 26539 value=3 prim::Constant pnnx_20649 0 1 26540 value=2 prim::ListConstruct pnnx_20650 6 1 26537 26538 26539 26540 19036 19037 19075 Tensor.view Tensor.view_2116 2 1 x.2 19073 x0.2 $input=x.2 $shape=19073 #x.2=(1,48,48,192)f32 #x0.2=(1,6,8,6,8,192)f32 prim::Constant pnnx_20654 0 1 26542 value=8 prim::Constant pnnx_20655 0 1 26543 value=8 prim::ListConstruct pnnx_20656 4 1 19038 26542 26543 19067 19078 torch.permute torch.permute_2936 2 1 x0.2 19075 19076 $input=x0.2 $dims=19075 #x0.2=(1,6,8,6,8,192)f32 #19076=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_308 1 1 19076 19077 memory_format=torch.contiguous_format $input=19076 #19076=(1,6,6,8,8,192)f32 #19077=(1,6,6,8,8,192)f32 prim::Constant pnnx_20658 0 1 26544 value=-1 prim::ListConstruct pnnx_20659 3 1 26544 19039 19053 19080 prim::Constant pnnx_20661 0 1 19082 value=1.767767e-01 prim::Constant pnnx_20662 0 1 19083 value=trunc prim::Constant pnnx_20663 0 1 19084 value=6 prim::Constant pnnx_20664 0 1 19085 value=0 prim::Constant pnnx_20665 0 1 19086 value=1 prim::Constant pnnx_20666 0 1 19087 value=2 prim::Constant pnnx_20667 0 1 19088 value=3 prim::Constant pnnx_20668 0 1 19089 value=6 prim::Constant pnnx_20669 0 1 19090 value=4 prim::Constant pnnx_20670 0 1 19091 value=-2 prim::Constant pnnx_20671 0 1 19092 value=-1 prim::Constant pnnx_20672 0 1 19093 value=64 pnnx.Attribute layers_mmsa.5.residual_group.blocks.0.attn 0 1 relative_position_bias_table.2 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.2=(225,6)f32 pnnx.Attribute layers_mmsa.5.residual_group.blocks.0.attn 0 1 relative_position_index.2 
@relative_position_index=(64,64)i64 #relative_position_index.2=(64,64)i64 Tensor.view Tensor.view_2117 2 1 19077 19078 x_windows.2 $input=19077 $shape=19078 #19077=(1,6,6,8,8,192)f32 #x_windows.2=(36,8,8,192)f32 Tensor.view Tensor.view_2118 2 1 x_windows.2 19080 x1.2 $input=x_windows.2 $shape=19080 #x_windows.2=(36,8,8,192)f32 #x1.2=(36,64,192)f32 aten::size pnnx_20673 2 1 x1.2 19085 19101 #x1.2=(36,64,192)f32 prim::NumToTensor pnnx_20674 1 1 19101 B_.2 aten::Int pnnx_20675 1 1 B_.2 19103 aten::Int pnnx_20676 1 1 B_.2 19104 aten::size pnnx_20677 2 1 x1.2 19086 19105 #x1.2=(36,64,192)f32 prim::NumToTensor pnnx_20678 1 1 19105 N.2 aten::Int pnnx_20679 1 1 N.2 19107 aten::Int pnnx_20680 1 1 N.2 19108 aten::size pnnx_20681 2 1 x1.2 19087 19109 #x1.2=(36,64,192)f32 prim::NumToTensor pnnx_20682 1 1 19109 C.12 aten::Int pnnx_20683 1 1 C.12 19111 nn.Linear layers_mmsa.5.residual_group.blocks.0.attn.qkv 1 1 x1.2 19112 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.2=(36,64,192)f32 #19112=(36,64,576)f32 aten::div pnnx_20684 3 1 C.12 19084 19083 19113 aten::Int pnnx_20685 1 1 19113 19114 prim::ListConstruct pnnx_20686 5 1 19104 19108 19088 19089 19114 19115 prim::Constant pnnx_20688 0 1 26545 value=2 prim::Constant pnnx_20689 0 1 26546 value=0 prim::Constant pnnx_20690 0 1 26547 value=3 prim::Constant pnnx_20691 0 1 26548 value=1 prim::ListConstruct pnnx_20692 5 1 26545 26546 26547 26548 19090 19117 Tensor.reshape Tensor.reshape_638 2 1 19112 19115 19116 $input=19112 $shape=19115 #19112=(36,64,576)f32 #19116=(36,64,3,6,32)f32 prim::Constant pnnx_20694 0 1 26549 value=0 prim::Constant pnnx_20695 0 1 26550 value=0 prim::Constant pnnx_20697 0 1 26551 value=0 prim::Constant pnnx_20698 0 1 26552 value=1 prim::Constant pnnx_20700 0 1 26553 value=0 prim::Constant pnnx_20701 0 1 26554 value=2 torch.permute torch.permute_2937 2 1 19116 19117 qkv0.2 $input=19116 $dims=19117 #19116=(36,64,3,6,32)f32 #qkv0.2=(3,36,6,64,32)f32 Tensor.select Tensor.select_956 3 1 qkv0.2 26549 26550 q.2 $input=qkv0.2 $dim=26549 $index=26550 #qkv0.2=(3,36,6,64,32)f32 #q.2=(36,6,64,32)f32 aten::mul pnnx_20703 2 1 q.2 19082 q0.2 #q.2=(36,6,64,32)f32 #q0.2=(36,6,64,32)f32 Tensor.select Tensor.select_957 3 1 qkv0.2 26551 26552 k.2 $input=qkv0.2 $dim=26551 $index=26552 #qkv0.2=(3,36,6,64,32)f32 #k.2=(36,6,64,32)f32 prim::Constant pnnx_20706 0 1 26555 value=-1 prim::ListConstruct pnnx_20707 1 1 26555 19125 Tensor.view Tensor.view_2119 2 1 relative_position_index.2 19125 19126 $input=relative_position_index.2 $shape=19125 #relative_position_index.2=(64,64)i64 #19126=(4096)i64 prim::ListConstruct pnnx_20709 1 1 19126 19127 #19126=(4096)i64 prim::Constant pnnx_20711 0 1 26556 value=64 prim::Constant pnnx_20712 0 1 26557 value=-1 prim::ListConstruct pnnx_20713 3 1 19093 26556 26557 19129 Tensor.index Tensor.index_428 2 1 relative_position_bias_table.2 19127 19128 $input=relative_position_bias_table.2 $expr=19127 #relative_position_bias_table.2=(225,6)f32 #19128=(4096,6)f32 prim::Constant pnnx_20715 0 1 26558 value=2 prim::Constant pnnx_20716 0 1 26559 value=0 prim::Constant pnnx_20717 0 1 26560 value=1 prim::ListConstruct pnnx_20718 3 1 26558 26559 26560 19131 Tensor.view Tensor.view_2120 2 1 19128 19129 relative_position_bias.2 $input=19128 $shape=19129 #19128=(4096,6)f32 #relative_position_bias.2=(64,64,6)f32 prim::Constant pnnx_20722 0 1 26562 value=0 torch.permute torch.permute_2938 2 1 relative_position_bias.2 19131 19132 $input=relative_position_bias.2 $dims=19131 
#relative_position_bias.2=(64,64,6)f32 #19132=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_309 1 1 19132 relative_position_bias0.2 memory_format=torch.contiguous_format $input=19132 #19132=(6,64,64)f32 #relative_position_bias0.2=(6,64,64)f32 prim::Constant pnnx_20724 0 1 26563 value=1 torch.transpose torch.transpose_3203 3 1 k.2 19091 19092 19123 $input=k.2 $dim0=19091 $dim1=19092 #k.2=(36,6,64,32)f32 #19123=(36,6,32,64)f32 torch.matmul torch.matmul_2408 2 1 q0.2 19123 attn.6 $input=q0.2 $other=19123 #q0.2=(36,6,64,32)f32 #19123=(36,6,32,64)f32 #attn.6=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3422 2 1 relative_position_bias0.2 26562 19134 $input=relative_position_bias0.2 $dim=26562 #relative_position_bias0.2=(6,64,64)f32 #19134=(1,6,64,64)f32 aten::add pnnx_20725 3 1 attn.6 19134 26563 input.2 #attn.6=(36,6,64,64)f32 #19134=(1,6,64,64)f32 #input.2=(36,6,64,64)f32 nn.Softmax layers_mmsa.5.residual_group.blocks.0.attn.softmax 1 1 input.2 19136 dim=-1 #input.2=(36,6,64,64)f32 #19136=(36,6,64,64)f32 nn.Dropout layers_mmsa.5.residual_group.blocks.0.attn.attn_drop 1 1 19136 19137 #19136=(36,6,64,64)f32 #19137=(36,6,64,64)f32 Tensor.select Tensor.select_958 3 1 qkv0.2 26553 26554 v.2 $input=qkv0.2 $dim=26553 $index=26554 #qkv0.2=(3,36,6,64,32)f32 #v.2=(36,6,64,32)f32 prim::Constant pnnx_20727 0 1 26564 value=1 prim::Constant pnnx_20728 0 1 26565 value=2 torch.matmul torch.matmul_2409 2 1 19137 v.2 19138 $input=19137 $other=v.2 #19137=(36,6,64,64)f32 #v.2=(36,6,64,32)f32 #19138=(36,6,64,32)f32 prim::ListConstruct pnnx_20730 3 1 19103 19107 19111 19140 torch.transpose torch.transpose_3204 3 1 19138 26564 26565 19139 $input=19138 $dim0=26564 $dim1=26565 #19138=(36,6,64,32)f32 #19139=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_639 2 1 19139 19140 input0.2 $input=19139 $shape=19140 #19139=(36,64,6,32)f32 #input0.2=(36,64,192)f32 nn.Linear layers_mmsa.5.residual_group.blocks.0.attn.proj 1 1 input0.2 19142 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.2=(36,64,192)f32 #19142=(36,64,192)f32 nn.Dropout layers_mmsa.5.residual_group.blocks.0.attn.proj_drop 1 1 19142 19143 #19142=(36,64,192)f32 #19143=(36,64,192)f32 prim::Constant pnnx_20732 0 1 26566 value=-1 prim::Constant pnnx_20733 0 1 26567 value=8 prim::Constant pnnx_20734 0 1 26568 value=8 prim::ListConstruct pnnx_20735 4 1 26566 26567 26568 19052 19144 prim::Constant pnnx_20737 0 1 26569 value=8 prim::Constant pnnx_20738 0 1 26570 value=trunc aten::div pnnx_20739 3 1 H1.1 26569 26570 19146 aten::Int pnnx_20740 1 1 19146 19147 prim::Constant pnnx_20741 0 1 26571 value=8 prim::Constant pnnx_20742 0 1 26572 value=trunc aten::div pnnx_20743 3 1 W1.1 26571 26572 19148 aten::Int pnnx_20744 1 1 19148 19149 prim::Constant pnnx_20745 0 1 26573 value=1 prim::Constant pnnx_20746 0 1 26574 value=8 prim::Constant pnnx_20747 0 1 26575 value=8 prim::Constant pnnx_20748 0 1 26576 value=-1 prim::ListConstruct pnnx_20749 6 1 26573 19147 19149 26574 26575 26576 19150 prim::Constant pnnx_20751 0 1 26577 value=0 prim::Constant pnnx_20752 0 1 26578 value=1 prim::Constant pnnx_20753 0 1 26579 value=3 prim::Constant pnnx_20754 0 1 26580 value=2 prim::Constant pnnx_20755 0 1 26581 value=4 prim::Constant pnnx_20756 0 1 26582 value=5 prim::ListConstruct pnnx_20757 6 1 26577 26578 26579 26580 26581 26582 19152 Tensor.view Tensor.view_2121 2 1 19143 19144 windows.2 $input=19143 $shape=19144 #19143=(36,64,192)f32 #windows.2=(36,8,8,192)f32 Tensor.view Tensor.view_2122 2 1 windows.2 19150 x2.2 $input=windows.2 $shape=19150 
#windows.2=(36,8,8,192)f32 #x2.2=(1,6,6,8,8,192)f32 prim::Constant pnnx_20761 0 1 26584 value=1 prim::Constant pnnx_20762 0 1 26585 value=-1 prim::ListConstruct pnnx_20763 4 1 26584 1573 1813 26585 19155 torch.permute torch.permute_2939 2 1 x2.2 19152 19153 $input=x2.2 $dims=19152 #x2.2=(1,6,6,8,8,192)f32 #19153=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_310 1 1 19153 19154 memory_format=torch.contiguous_format $input=19153 #19153=(1,6,8,6,8,192)f32 #19154=(1,6,8,6,8,192)f32 aten::mul pnnx_20765 2 1 H1.1 W1.1 19157 aten::Int pnnx_20766 1 1 19157 19158 prim::ListConstruct pnnx_20767 3 1 19047 19158 19051 19159 prim::Constant pnnx_20769 0 1 19161 value=None prim::Constant pnnx_20770 0 1 26586 value=1 Tensor.view Tensor.view_2123 2 1 19154 19155 x3.2 $input=19154 $shape=19155 #19154=(1,6,8,6,8,192)f32 #x3.2=(1,48,48,192)f32 Tensor.view Tensor.view_2124 2 1 x3.2 19159 x4.2 $input=x3.2 $shape=19159 #x3.2=(1,48,48,192)f32 #x4.2=(1,2304,192)f32 aten::add pnnx_20771 3 1 19011 x4.2 26586 input.6 #19011=(1,2304,192)f32 #x4.2=(1,2304,192)f32 #input.6=(1,2304,192)f32 nn.LayerNorm layers_mmsa.5.residual_group.blocks.0.norm2 1 1 input.6 19163 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.6=(1,2304,192)f32 #19163=(1,2304,192)f32 nn.Linear layers_mmsa.5.residual_group.blocks.0.mlp.fc1 1 1 19163 19168 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #19163=(1,2304,192)f32 #19168=(1,2304,384)f32 nn.GELU layers_mmsa.5.residual_group.blocks.0.mlp.act 1 1 19168 19169 #19168=(1,2304,384)f32 #19169=(1,2304,384)f32 nn.Dropout layers_mmsa.5.residual_group.blocks.0.mlp.drop 1 1 19169 19170 #19169=(1,2304,384)f32 #19170=(1,2304,384)f32 nn.Linear layers_mmsa.5.residual_group.blocks.0.mlp.fc2 1 1 19170 19171 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #19170=(1,2304,384)f32 #19171=(1,2304,192)f32 nn.Dropout layers_mmsa.5.residual_group.blocks.0.mlp.drop 1 1 19171 19172 #19171=(1,2304,192)f32 #19172=(1,2304,192)f32 prim::Constant pnnx_20772 0 1 19173 value=None prim::Constant pnnx_20773 0 1 26587 value=1 aten::add pnnx_20774 3 1 input.6 19172 26587 19174 #input.6=(1,2304,192)f32 #19172=(1,2304,192)f32 #19174=(1,2304,192)f32 prim::Constant pnnx_20775 0 1 19175 value=trunc prim::Constant pnnx_20776 0 1 19176 value=8 prim::Constant pnnx_20777 0 1 19177 value=0 prim::Constant pnnx_20778 0 1 19178 value=2 prim::Constant pnnx_20779 0 1 19179 value=-4 prim::Constant pnnx_20780 0 1 19180 value=1 prim::Constant pnnx_20781 0 1 19181 value=3 prim::Constant pnnx_20782 0 1 19182 value=8 prim::Constant pnnx_20783 0 1 19183 value=4 prim::Constant pnnx_20784 0 1 19184 value=5 prim::Constant pnnx_20785 0 1 19185 value=-1 prim::Constant pnnx_20786 0 1 19186 value=64 pnnx.Attribute layers_mmsa.5.residual_group.blocks.1 0 1 attn_mask.2 @attn_mask=(36,64,64)f32 #attn_mask.2=(36,64,64)f32 aten::size pnnx_20787 2 1 19174 19177 19193 #19174=(1,2304,192)f32 prim::NumToTensor pnnx_20788 1 1 19193 B.4 aten::Int pnnx_20789 1 1 B.4 19195 aten::Int pnnx_20790 1 1 B.4 19196 aten::size pnnx_20791 2 1 19174 19178 19197 #19174=(1,2304,192)f32 prim::NumToTensor pnnx_20792 1 1 19197 C.14 aten::Int pnnx_20793 1 1 C.14 19199 aten::Int pnnx_20794 1 1 C.14 19200 aten::Int pnnx_20795 1 1 C.14 19201 aten::Int pnnx_20796 1 1 C.14 19202 nn.LayerNorm layers_mmsa.5.residual_group.blocks.1.norm1 1 1 19174 19203 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #19174=(1,2304,192)f32 
#19203=(1,2304,192)f32 prim::ListConstruct pnnx_20797 4 1 19196 1570 1810 19202 19204 prim::Constant pnnx_20799 0 1 26588 value=-4 prim::ListConstruct pnnx_20800 2 1 19179 26588 19206 prim::Constant pnnx_20801 0 1 26589 value=2 prim::ListConstruct pnnx_20802 2 1 19180 26589 19207 Tensor.view Tensor.view_2125 2 1 19203 19204 x.4 $input=19203 $shape=19204 #19203=(1,2304,192)f32 #x.4=(1,48,48,192)f32 prim::Constant pnnx_20804 0 1 26590 value=0 torch.roll torch.roll_2522 3 1 x.4 19206 19207 x0.4 $input=x.4 $shifts=19206 $dims=19207 #x.4=(1,48,48,192)f32 #x0.4=(1,48,48,192)f32 aten::size pnnx_20805 2 1 x0.4 26590 19209 #x0.4=(1,48,48,192)f32 prim::NumToTensor pnnx_20806 1 1 19209 B0.4 aten::Int pnnx_20807 1 1 B0.4 19211 prim::Constant pnnx_20808 0 1 26591 value=1 aten::size pnnx_20809 2 1 x0.4 26591 19212 #x0.4=(1,48,48,192)f32 prim::NumToTensor pnnx_20810 1 1 19212 19213 prim::Constant pnnx_20811 0 1 26592 value=2 aten::size pnnx_20812 2 1 x0.4 26592 19214 #x0.4=(1,48,48,192)f32 prim::NumToTensor pnnx_20813 1 1 19214 19215 aten::size pnnx_20814 2 1 x0.4 19181 19216 #x0.4=(1,48,48,192)f32 prim::NumToTensor pnnx_20815 1 1 19216 C0.4 aten::Int pnnx_20816 1 1 C0.4 19218 aten::Int pnnx_20817 1 1 C0.4 19219 aten::div pnnx_20818 3 1 19213 19176 19175 19220 aten::Int pnnx_20819 1 1 19220 19221 prim::Constant pnnx_20820 0 1 26593 value=8 prim::Constant pnnx_20821 0 1 26594 value=trunc aten::div pnnx_20822 3 1 19215 26593 26594 19222 aten::Int pnnx_20823 1 1 19222 19223 prim::Constant pnnx_20824 0 1 26595 value=8 prim::ListConstruct pnnx_20825 6 1 19211 19221 19182 19223 26595 19219 19224 prim::Constant pnnx_20827 0 1 26596 value=0 prim::Constant pnnx_20828 0 1 26597 value=1 prim::Constant pnnx_20829 0 1 26598 value=3 prim::Constant pnnx_20830 0 1 26599 value=2 prim::ListConstruct pnnx_20831 6 1 26596 26597 26598 26599 19183 19184 19226 Tensor.view Tensor.view_2126 2 1 x0.4 19224 x1.4 $input=x0.4 $shape=19224 #x0.4=(1,48,48,192)f32 #x1.4=(1,6,8,6,8,192)f32 prim::Constant pnnx_20835 0 1 26601 value=8 prim::Constant pnnx_20836 0 1 26602 value=8 prim::ListConstruct pnnx_20837 4 1 19185 26601 26602 19218 19229 torch.permute torch.permute_2940 2 1 x1.4 19226 19227 $input=x1.4 $dims=19226 #x1.4=(1,6,8,6,8,192)f32 #19227=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_311 1 1 19227 19228 memory_format=torch.contiguous_format $input=19227 #19227=(1,6,6,8,8,192)f32 #19228=(1,6,6,8,8,192)f32 prim::Constant pnnx_20839 0 1 26603 value=-1 prim::ListConstruct pnnx_20840 3 1 26603 19186 19201 19231 prim::Constant pnnx_20842 0 1 19233 value=1.767767e-01 prim::Constant pnnx_20843 0 1 19234 value=trunc prim::Constant pnnx_20844 0 1 19235 value=6 prim::Constant pnnx_20845 0 1 19236 value=0 prim::Constant pnnx_20846 0 1 19237 value=1 prim::Constant pnnx_20847 0 1 19238 value=2 prim::Constant pnnx_20848 0 1 19239 value=3 prim::Constant pnnx_20849 0 1 19240 value=6 prim::Constant pnnx_20850 0 1 19241 value=4 prim::Constant pnnx_20851 0 1 19242 value=-2 prim::Constant pnnx_20852 0 1 19243 value=-1 prim::Constant pnnx_20853 0 1 19244 value=64 pnnx.Attribute layers_mmsa.5.residual_group.blocks.1.attn 0 1 relative_position_bias_table.4 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.4=(225,6)f32 pnnx.Attribute layers_mmsa.5.residual_group.blocks.1.attn 0 1 relative_position_index.4 @relative_position_index=(64,64)i64 #relative_position_index.4=(64,64)i64 Tensor.view Tensor.view_2127 2 1 19228 19229 x_windows.4 $input=19228 $shape=19229 #19228=(1,6,6,8,8,192)f32 #x_windows.4=(36,8,8,192)f32 
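
The shifted block above rolls the feature map by (-4, -4) before partitioning and adds the attn_mask stored as a pnnx.Attribute (36,64,64). The graph only stores the finished mask; the sketch below shows the typical Swin-style construction that produces a mask of exactly that shape, as an assumption about how it was built rather than the exported model's own code:

```python
import torch

def build_attn_mask(H=48, W=48, win=8, shift=4) -> torch.Tensor:
    """Typical construction of the (nW, win*win, win*win) additive mask, here (36, 64, 64)."""
    img_mask = torch.zeros(1, H, W, 1)
    cnt = 0
    for h in (slice(0, -win), slice(-win, -shift), slice(-shift, None)):
        for w in (slice(0, -win), slice(-win, -shift), slice(-shift, None)):
            img_mask[:, h, w, :] = cnt       # label each shifted region
            cnt += 1
    # partition the label map into windows and compare token labels pairwise
    m = img_mask.view(1, H // win, win, W // win, win, 1)
    m = m.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, win * win)   # (36, 64)
    diff = m.unsqueeze(1) - m.unsqueeze(2)                              # (36, 64, 64)
    return diff.masked_fill(diff != 0, -100.0).masked_fill(diff == 0, 0.0)

mask = build_attn_mask()        # (36, 64, 64), the shape of attn_mask.* in the dump
```
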
Tensor.view Tensor.view_2128 2 1 x_windows.4 19231 x2.4 $input=x_windows.4 $shape=19231 #x_windows.4=(36,8,8,192)f32 #x2.4=(36,64,192)f32 aten::size pnnx_20854 2 1 x2.4 19236 19252 #x2.4=(36,64,192)f32 prim::NumToTensor pnnx_20855 1 1 19252 B_.4 aten::Int pnnx_20856 1 1 B_.4 19254 aten::Int pnnx_20857 1 1 B_.4 19255 aten::size pnnx_20858 2 1 x2.4 19237 19256 #x2.4=(36,64,192)f32 prim::NumToTensor pnnx_20859 1 1 19256 N.4 aten::Int pnnx_20860 1 1 N.4 19258 aten::Int pnnx_20861 1 1 N.4 19259 aten::Int pnnx_20862 1 1 N.4 19260 aten::Int pnnx_20863 1 1 N.4 19261 aten::Int pnnx_20864 1 1 N.4 19262 aten::Int pnnx_20865 1 1 N.4 19263 aten::size pnnx_20866 2 1 x2.4 19238 19264 #x2.4=(36,64,192)f32 prim::NumToTensor pnnx_20867 1 1 19264 C.16 aten::Int pnnx_20868 1 1 C.16 19266 nn.Linear layers_mmsa.5.residual_group.blocks.1.attn.qkv 1 1 x2.4 19267 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.4=(36,64,192)f32 #19267=(36,64,576)f32 aten::div pnnx_20869 3 1 C.16 19235 19234 19268 aten::Int pnnx_20870 1 1 19268 19269 prim::ListConstruct pnnx_20871 5 1 19255 19263 19239 19240 19269 19270 prim::Constant pnnx_20873 0 1 26604 value=2 prim::Constant pnnx_20874 0 1 26605 value=0 prim::Constant pnnx_20875 0 1 26606 value=3 prim::Constant pnnx_20876 0 1 26607 value=1 prim::ListConstruct pnnx_20877 5 1 26604 26605 26606 26607 19241 19272 Tensor.reshape Tensor.reshape_640 2 1 19267 19270 19271 $input=19267 $shape=19270 #19267=(36,64,576)f32 #19271=(36,64,3,6,32)f32 prim::Constant pnnx_20879 0 1 26608 value=0 prim::Constant pnnx_20880 0 1 26609 value=0 prim::Constant pnnx_20882 0 1 26610 value=0 prim::Constant pnnx_20883 0 1 26611 value=1 prim::Constant pnnx_20885 0 1 26612 value=0 prim::Constant pnnx_20886 0 1 26613 value=2 torch.permute torch.permute_2941 2 1 19271 19272 qkv0.4 $input=19271 $dims=19272 #19271=(36,64,3,6,32)f32 #qkv0.4=(3,36,6,64,32)f32 Tensor.select Tensor.select_959 3 1 qkv0.4 26608 26609 q.4 $input=qkv0.4 $dim=26608 $index=26609 #qkv0.4=(3,36,6,64,32)f32 #q.4=(36,6,64,32)f32 aten::mul pnnx_20888 2 1 q.4 19233 q0.4 #q.4=(36,6,64,32)f32 #q0.4=(36,6,64,32)f32 Tensor.select Tensor.select_960 3 1 qkv0.4 26610 26611 k.4 $input=qkv0.4 $dim=26610 $index=26611 #qkv0.4=(3,36,6,64,32)f32 #k.4=(36,6,64,32)f32 prim::Constant pnnx_20891 0 1 26614 value=-1 prim::ListConstruct pnnx_20892 1 1 26614 19280 Tensor.view Tensor.view_2129 2 1 relative_position_index.4 19280 19281 $input=relative_position_index.4 $shape=19280 #relative_position_index.4=(64,64)i64 #19281=(4096)i64 prim::ListConstruct pnnx_20894 1 1 19281 19282 #19281=(4096)i64 prim::Constant pnnx_20896 0 1 26615 value=64 prim::Constant pnnx_20897 0 1 26616 value=-1 prim::ListConstruct pnnx_20898 3 1 19244 26615 26616 19284 Tensor.index Tensor.index_429 2 1 relative_position_bias_table.4 19282 19283 $input=relative_position_bias_table.4 $expr=19282 #relative_position_bias_table.4=(225,6)f32 #19283=(4096,6)f32 prim::Constant pnnx_20900 0 1 26617 value=2 prim::Constant pnnx_20901 0 1 26618 value=0 prim::Constant pnnx_20902 0 1 26619 value=1 prim::ListConstruct pnnx_20903 3 1 26617 26618 26619 19286 Tensor.view Tensor.view_2130 2 1 19283 19284 relative_position_bias.4 $input=19283 $shape=19284 #19283=(4096,6)f32 #relative_position_bias.4=(64,64,6)f32 prim::Constant pnnx_20907 0 1 26621 value=0 torch.permute torch.permute_2942 2 1 relative_position_bias.4 19286 19287 $input=relative_position_bias.4 $dims=19286 #relative_position_bias.4=(64,64,6)f32 #19287=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_312 1 1 19287 
relative_position_bias0.4 memory_format=torch.contiguous_format $input=19287 #19287=(6,64,64)f32 #relative_position_bias0.4=(6,64,64)f32 prim::Constant pnnx_20909 0 1 26622 value=1 torch.transpose torch.transpose_3205 3 1 k.4 19242 19243 19278 $input=k.4 $dim0=19242 $dim1=19243 #k.4=(36,6,64,32)f32 #19278=(36,6,32,64)f32 torch.matmul torch.matmul_2410 2 1 q0.4 19278 attn.10 $input=q0.4 $other=19278 #q0.4=(36,6,64,32)f32 #19278=(36,6,32,64)f32 #attn.10=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3423 2 1 relative_position_bias0.4 26621 19289 $input=relative_position_bias0.4 $dim=26621 #relative_position_bias0.4=(6,64,64)f32 #19289=(1,6,64,64)f32 aten::add pnnx_20910 3 1 attn.10 19289 26622 attn0.2 #attn.10=(36,6,64,64)f32 #19289=(1,6,64,64)f32 #attn0.2=(36,6,64,64)f32 prim::Constant pnnx_20911 0 1 26623 value=0 aten::size pnnx_20912 2 1 attn_mask.2 26623 19291 #attn_mask.2=(36,64,64)f32 prim::NumToTensor pnnx_20913 1 1 19291 other.2 aten::Int pnnx_20914 1 1 other.2 19293 prim::Constant pnnx_20915 0 1 26624 value=trunc aten::div pnnx_20916 3 1 B_.4 other.2 26624 19294 aten::Int pnnx_20917 1 1 19294 19295 prim::Constant pnnx_20918 0 1 26625 value=6 prim::ListConstruct pnnx_20919 5 1 19295 19293 26625 19262 19261 19296 prim::Constant pnnx_20921 0 1 26626 value=1 prim::Constant pnnx_20923 0 1 26627 value=0 prim::Constant pnnx_20925 0 1 26628 value=1 Tensor.view Tensor.view_2131 2 1 attn0.2 19296 19297 $input=attn0.2 $shape=19296 #attn0.2=(36,6,64,64)f32 #19297=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3424 2 1 attn_mask.2 26626 19298 $input=attn_mask.2 $dim=26626 #attn_mask.2=(36,64,64)f32 #19298=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3425 2 1 19298 26627 19299 $input=19298 $dim=26627 #19298=(36,1,64,64)f32 #19299=(1,36,1,64,64)f32 aten::add pnnx_20926 3 1 19297 19299 26628 attn1.2 #19297=(1,36,6,64,64)f32 #19299=(1,36,1,64,64)f32 #attn1.2=(1,36,6,64,64)f32 prim::Constant pnnx_20927 0 1 26629 value=-1 prim::Constant pnnx_20928 0 1 26630 value=6 prim::ListConstruct pnnx_20929 4 1 26629 26630 19260 19259 19301 Tensor.view Tensor.view_2132 2 1 attn1.2 19301 input.8 $input=attn1.2 $shape=19301 #attn1.2=(1,36,6,64,64)f32 #input.8=(36,6,64,64)f32 nn.Softmax layers_mmsa.5.residual_group.blocks.1.attn.softmax 1 1 input.8 19303 dim=-1 #input.8=(36,6,64,64)f32 #19303=(36,6,64,64)f32 nn.Dropout layers_mmsa.5.residual_group.blocks.1.attn.attn_drop 1 1 19303 19304 #19303=(36,6,64,64)f32 #19304=(36,6,64,64)f32 Tensor.select Tensor.select_961 3 1 qkv0.4 26612 26613 v.4 $input=qkv0.4 $dim=26612 $index=26613 #qkv0.4=(3,36,6,64,32)f32 #v.4=(36,6,64,32)f32 prim::Constant pnnx_20932 0 1 26631 value=1 prim::Constant pnnx_20933 0 1 26632 value=2 torch.matmul torch.matmul_2411 2 1 19304 v.4 19305 $input=19304 $other=v.4 #19304=(36,6,64,64)f32 #v.4=(36,6,64,32)f32 #19305=(36,6,64,32)f32 prim::ListConstruct pnnx_20935 3 1 19254 19258 19266 19307 torch.transpose torch.transpose_3206 3 1 19305 26631 26632 19306 $input=19305 $dim0=26631 $dim1=26632 #19305=(36,6,64,32)f32 #19306=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_641 2 1 19306 19307 input0.4 $input=19306 $shape=19307 #19306=(36,64,6,32)f32 #input0.4=(36,64,192)f32 nn.Linear layers_mmsa.5.residual_group.blocks.1.attn.proj 1 1 input0.4 19309 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.4=(36,64,192)f32 #19309=(36,64,192)f32 nn.Dropout layers_mmsa.5.residual_group.blocks.1.attn.proj_drop 1 1 19309 19310 #19309=(36,64,192)f32 #19310=(36,64,192)f32 prim::Constant pnnx_20937 0 1 26633 value=-1 
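The operator run above (a qkv nn.Linear 192->576, reshape to (36,64,3,6,32), permute, q scaled by 1.767767e-01 = 1/sqrt(32), k transposed, matmul, relative-position-bias add, optional mask add, softmax, matmul with v, then the proj nn.Linear) is the standard windowed multi-head self-attention pattern. Below is a minimal PyTorch sketch of the same computation, reconstructed only from the shape annotations (window_size=8, dim=192, num_heads=6, head_dim=32); the class and buffer names are illustrative, dropouts are omitted, and this is not the exported module's own code.

import torch
import torch.nn as nn

class WindowAttentionSketch(nn.Module):
    # dim=192, num_heads=6, window_size=8 -> head_dim=32, scale = 32**-0.5 = 0.1767767
    def __init__(self, dim=192, num_heads=6, window_size=8):
        super().__init__()
        self.num_heads = num_heads
        head_dim = dim // num_heads
        self.scale = head_dim ** -0.5
        self.qkv = nn.Linear(dim, dim * 3, bias=True)      # 192 -> 576, as in the dump
        self.proj = nn.Linear(dim, dim, bias=True)
        # (2*8-1)^2 = 225 relative offsets, one bias per head -> (225, 6) table
        self.relative_position_bias_table = nn.Parameter(torch.zeros(225, num_heads))
        self.register_buffer("relative_position_index",
                             torch.zeros(window_size * window_size,
                                         window_size * window_size, dtype=torch.long))

    def forward(self, x, mask=None):                        # x: (B_, 64, 192)
        B_, N, C = x.shape
        qkv = self.qkv(x).reshape(B_, N, 3, self.num_heads, C // self.num_heads)
        qkv = qkv.permute(2, 0, 3, 1, 4)                    # (3, B_, heads, 64, 32)
        q, k, v = qkv[0], qkv[1], qkv[2]
        attn = (q * self.scale) @ k.transpose(-2, -1)       # (B_, heads, 64, 64)
        bias = self.relative_position_bias_table[
            self.relative_position_index.view(-1)].view(N, N, -1)
        attn = attn + bias.permute(2, 0, 1).contiguous().unsqueeze(0)
        if mask is not None:                                # mask: (nW, 64, 64)
            nW = mask.shape[0]
            attn = attn.view(B_ // nW, nW, self.num_heads, N, N) \
                   + mask.unsqueeze(1).unsqueeze(0)
            attn = attn.view(-1, self.num_heads, N, N)
        attn = attn.softmax(dim=-1)
        x = (attn @ v).transpose(1, 2).reshape(B_, N, C)
        return self.proj(x)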
prim::Constant pnnx_20938 0 1 26634 value=8 prim::Constant pnnx_20939 0 1 26635 value=8 prim::ListConstruct pnnx_20940 4 1 26633 26634 26635 19200 19311 prim::Constant pnnx_20942 0 1 26636 value=8 prim::Constant pnnx_20943 0 1 26637 value=trunc aten::div pnnx_20944 3 1 H1.1 26636 26637 19313 aten::Int pnnx_20945 1 1 19313 19314 prim::Constant pnnx_20946 0 1 26638 value=8 prim::Constant pnnx_20947 0 1 26639 value=trunc aten::div pnnx_20948 3 1 W1.1 26638 26639 19315 aten::Int pnnx_20949 1 1 19315 19316 prim::Constant pnnx_20950 0 1 26640 value=1 prim::Constant pnnx_20951 0 1 26641 value=8 prim::Constant pnnx_20952 0 1 26642 value=8 prim::Constant pnnx_20953 0 1 26643 value=-1 prim::ListConstruct pnnx_20954 6 1 26640 19314 19316 26641 26642 26643 19317 prim::Constant pnnx_20956 0 1 26644 value=0 prim::Constant pnnx_20957 0 1 26645 value=1 prim::Constant pnnx_20958 0 1 26646 value=3 prim::Constant pnnx_20959 0 1 26647 value=2 prim::Constant pnnx_20960 0 1 26648 value=4 prim::Constant pnnx_20961 0 1 26649 value=5 prim::ListConstruct pnnx_20962 6 1 26644 26645 26646 26647 26648 26649 19319 Tensor.view Tensor.view_2133 2 1 19310 19311 windows.4 $input=19310 $shape=19311 #19310=(36,64,192)f32 #windows.4=(36,8,8,192)f32 Tensor.view Tensor.view_2134 2 1 windows.4 19317 x3.4 $input=windows.4 $shape=19317 #windows.4=(36,8,8,192)f32 #x3.4=(1,6,6,8,8,192)f32 prim::Constant pnnx_20966 0 1 26651 value=1 prim::Constant pnnx_20967 0 1 26652 value=-1 prim::ListConstruct pnnx_20968 4 1 26651 1567 1807 26652 19322 torch.permute torch.permute_2943 2 1 x3.4 19319 19320 $input=x3.4 $dims=19319 #x3.4=(1,6,6,8,8,192)f32 #19320=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_313 1 1 19320 19321 memory_format=torch.contiguous_format $input=19320 #19320=(1,6,8,6,8,192)f32 #19321=(1,6,8,6,8,192)f32 prim::Constant pnnx_20970 0 1 26653 value=4 prim::Constant pnnx_20971 0 1 26654 value=4 prim::ListConstruct pnnx_20972 2 1 26653 26654 19324 prim::Constant pnnx_20973 0 1 26655 value=1 prim::Constant pnnx_20974 0 1 26656 value=2 prim::ListConstruct pnnx_20975 2 1 26655 26656 19325 Tensor.view Tensor.view_2135 2 1 19321 19322 shifted_x.2 $input=19321 $shape=19322 #19321=(1,6,8,6,8,192)f32 #shifted_x.2=(1,48,48,192)f32 aten::mul pnnx_20977 2 1 H1.1 W1.1 19327 aten::Int pnnx_20978 1 1 19327 19328 prim::ListConstruct pnnx_20979 3 1 19195 19328 19199 19329 prim::Constant pnnx_20981 0 1 19331 value=None prim::Constant pnnx_20982 0 1 26657 value=1 torch.roll torch.roll_2523 3 1 shifted_x.2 19324 19325 x4.4 $input=shifted_x.2 $shifts=19324 $dims=19325 #shifted_x.2=(1,48,48,192)f32 #x4.4=(1,48,48,192)f32 Tensor.view Tensor.view_2136 2 1 x4.4 19329 x5.2 $input=x4.4 $shape=19329 #x4.4=(1,48,48,192)f32 #x5.2=(1,2304,192)f32 aten::add pnnx_20983 3 1 19174 x5.2 26657 input.10 #19174=(1,2304,192)f32 #x5.2=(1,2304,192)f32 #input.10=(1,2304,192)f32 nn.LayerNorm layers_mmsa.5.residual_group.blocks.1.norm2 1 1 input.10 19333 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.10=(1,2304,192)f32 #19333=(1,2304,192)f32 nn.Linear layers_mmsa.5.residual_group.blocks.1.mlp.fc1 1 1 19333 19338 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #19333=(1,2304,192)f32 #19338=(1,2304,384)f32 nn.GELU layers_mmsa.5.residual_group.blocks.1.mlp.act 1 1 19338 19339 #19338=(1,2304,384)f32 #19339=(1,2304,384)f32 nn.Dropout layers_mmsa.5.residual_group.blocks.1.mlp.drop 1 1 19339 19340 #19339=(1,2304,384)f32 #19340=(1,2304,384)f32 nn.Linear 
layers_mmsa.5.residual_group.blocks.1.mlp.fc2 1 1 19340 19341 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #19340=(1,2304,384)f32 #19341=(1,2304,192)f32 nn.Dropout layers_mmsa.5.residual_group.blocks.1.mlp.drop 1 1 19341 19342 #19341=(1,2304,192)f32 #19342=(1,2304,192)f32 prim::Constant pnnx_20984 0 1 19343 value=None prim::Constant pnnx_20985 0 1 26658 value=1 aten::add pnnx_20986 3 1 input.10 19342 26658 19344 #input.10=(1,2304,192)f32 #19342=(1,2304,192)f32 #19344=(1,2304,192)f32 prim::Constant pnnx_20987 0 1 19345 value=trunc prim::Constant pnnx_20988 0 1 19346 value=8 prim::Constant pnnx_20989 0 1 19347 value=0 prim::Constant pnnx_20990 0 1 19348 value=2 prim::Constant pnnx_20991 0 1 19349 value=1 prim::Constant pnnx_20992 0 1 19350 value=3 prim::Constant pnnx_20993 0 1 19351 value=8 prim::Constant pnnx_20994 0 1 19352 value=4 prim::Constant pnnx_20995 0 1 19353 value=5 prim::Constant pnnx_20996 0 1 19354 value=-1 prim::Constant pnnx_20997 0 1 19355 value=64 aten::size pnnx_20998 2 1 19344 19347 19361 #19344=(1,2304,192)f32 prim::NumToTensor pnnx_20999 1 1 19361 B.6 aten::Int pnnx_21000 1 1 B.6 19363 aten::Int pnnx_21001 1 1 B.6 19364 aten::size pnnx_21002 2 1 19344 19348 19365 #19344=(1,2304,192)f32 prim::NumToTensor pnnx_21003 1 1 19365 C.18 aten::Int pnnx_21004 1 1 C.18 19367 aten::Int pnnx_21005 1 1 C.18 19368 aten::Int pnnx_21006 1 1 C.18 19369 aten::Int pnnx_21007 1 1 C.18 19370 nn.LayerNorm layers_mmsa.5.residual_group.blocks.2.norm1 1 1 19344 19371 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #19344=(1,2304,192)f32 #19371=(1,2304,192)f32 prim::ListConstruct pnnx_21008 4 1 19364 1564 1804 19370 19372 prim::Constant pnnx_21010 0 1 26659 value=0 Tensor.view Tensor.view_2137 2 1 19371 19372 x.6 $input=19371 $shape=19372 #19371=(1,2304,192)f32 #x.6=(1,48,48,192)f32 aten::size pnnx_21011 2 1 x.6 26659 19374 #x.6=(1,48,48,192)f32 prim::NumToTensor pnnx_21012 1 1 19374 B0.6 aten::Int pnnx_21013 1 1 B0.6 19376 aten::size pnnx_21014 2 1 x.6 19349 19377 #x.6=(1,48,48,192)f32 prim::NumToTensor pnnx_21015 1 1 19377 19378 prim::Constant pnnx_21016 0 1 26660 value=2 aten::size pnnx_21017 2 1 x.6 26660 19379 #x.6=(1,48,48,192)f32 prim::NumToTensor pnnx_21018 1 1 19379 19380 aten::size pnnx_21019 2 1 x.6 19350 19381 #x.6=(1,48,48,192)f32 prim::NumToTensor pnnx_21020 1 1 19381 C0.6 aten::Int pnnx_21021 1 1 C0.6 19383 aten::Int pnnx_21022 1 1 C0.6 19384 aten::div pnnx_21023 3 1 19378 19346 19345 19385 aten::Int pnnx_21024 1 1 19385 19386 prim::Constant pnnx_21025 0 1 26661 value=8 prim::Constant pnnx_21026 0 1 26662 value=trunc aten::div pnnx_21027 3 1 19380 26661 26662 19387 aten::Int pnnx_21028 1 1 19387 19388 prim::Constant pnnx_21029 0 1 26663 value=8 prim::ListConstruct pnnx_21030 6 1 19376 19386 19351 19388 26663 19384 19389 prim::Constant pnnx_21032 0 1 26664 value=0 prim::Constant pnnx_21033 0 1 26665 value=1 prim::Constant pnnx_21034 0 1 26666 value=3 prim::Constant pnnx_21035 0 1 26667 value=2 prim::ListConstruct pnnx_21036 6 1 26664 26665 26666 26667 19352 19353 19391 Tensor.view Tensor.view_2138 2 1 x.6 19389 x0.6 $input=x.6 $shape=19389 #x.6=(1,48,48,192)f32 #x0.6=(1,6,8,6,8,192)f32 prim::Constant pnnx_21040 0 1 26669 value=8 prim::Constant pnnx_21041 0 1 26670 value=8 prim::ListConstruct pnnx_21042 4 1 19354 26669 26670 19383 19394 torch.permute torch.permute_2944 2 1 x0.6 19391 19392 $input=x0.6 $dims=19391 #x0.6=(1,6,8,6,8,192)f32 #19392=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_314 
1 1 19392 19393 memory_format=torch.contiguous_format $input=19392 #19392=(1,6,6,8,8,192)f32 #19393=(1,6,6,8,8,192)f32 prim::Constant pnnx_21044 0 1 26671 value=-1 prim::ListConstruct pnnx_21045 3 1 26671 19355 19369 19396 prim::Constant pnnx_21047 0 1 19398 value=1.767767e-01 prim::Constant pnnx_21048 0 1 19399 value=trunc prim::Constant pnnx_21049 0 1 19400 value=6 prim::Constant pnnx_21050 0 1 19401 value=0 prim::Constant pnnx_21051 0 1 19402 value=1 prim::Constant pnnx_21052 0 1 19403 value=2 prim::Constant pnnx_21053 0 1 19404 value=3 prim::Constant pnnx_21054 0 1 19405 value=6 prim::Constant pnnx_21055 0 1 19406 value=4 prim::Constant pnnx_21056 0 1 19407 value=-2 prim::Constant pnnx_21057 0 1 19408 value=-1 prim::Constant pnnx_21058 0 1 19409 value=64 pnnx.Attribute layers_mmsa.5.residual_group.blocks.2.attn 0 1 relative_position_bias_table.6 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.6=(225,6)f32 pnnx.Attribute layers_mmsa.5.residual_group.blocks.2.attn 0 1 relative_position_index.6 @relative_position_index=(64,64)i64 #relative_position_index.6=(64,64)i64 Tensor.view Tensor.view_2139 2 1 19393 19394 x_windows.6 $input=19393 $shape=19394 #19393=(1,6,6,8,8,192)f32 #x_windows.6=(36,8,8,192)f32 Tensor.view Tensor.view_2140 2 1 x_windows.6 19396 x1.6 $input=x_windows.6 $shape=19396 #x_windows.6=(36,8,8,192)f32 #x1.6=(36,64,192)f32 aten::size pnnx_21059 2 1 x1.6 19401 19417 #x1.6=(36,64,192)f32 prim::NumToTensor pnnx_21060 1 1 19417 B_.6 aten::Int pnnx_21061 1 1 B_.6 19419 aten::Int pnnx_21062 1 1 B_.6 19420 aten::size pnnx_21063 2 1 x1.6 19402 19421 #x1.6=(36,64,192)f32 prim::NumToTensor pnnx_21064 1 1 19421 N.6 aten::Int pnnx_21065 1 1 N.6 19423 aten::Int pnnx_21066 1 1 N.6 19424 aten::size pnnx_21067 2 1 x1.6 19403 19425 #x1.6=(36,64,192)f32 prim::NumToTensor pnnx_21068 1 1 19425 C.20 aten::Int pnnx_21069 1 1 C.20 19427 nn.Linear layers_mmsa.5.residual_group.blocks.2.attn.qkv 1 1 x1.6 19428 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.6=(36,64,192)f32 #19428=(36,64,576)f32 aten::div pnnx_21070 3 1 C.20 19400 19399 19429 aten::Int pnnx_21071 1 1 19429 19430 prim::ListConstruct pnnx_21072 5 1 19420 19424 19404 19405 19430 19431 prim::Constant pnnx_21074 0 1 26672 value=2 prim::Constant pnnx_21075 0 1 26673 value=0 prim::Constant pnnx_21076 0 1 26674 value=3 prim::Constant pnnx_21077 0 1 26675 value=1 prim::ListConstruct pnnx_21078 5 1 26672 26673 26674 26675 19406 19433 Tensor.reshape Tensor.reshape_642 2 1 19428 19431 19432 $input=19428 $shape=19431 #19428=(36,64,576)f32 #19432=(36,64,3,6,32)f32 prim::Constant pnnx_21080 0 1 26676 value=0 prim::Constant pnnx_21081 0 1 26677 value=0 prim::Constant pnnx_21083 0 1 26678 value=0 prim::Constant pnnx_21084 0 1 26679 value=1 prim::Constant pnnx_21086 0 1 26680 value=0 prim::Constant pnnx_21087 0 1 26681 value=2 torch.permute torch.permute_2945 2 1 19432 19433 qkv0.6 $input=19432 $dims=19433 #19432=(36,64,3,6,32)f32 #qkv0.6=(3,36,6,64,32)f32 Tensor.select Tensor.select_962 3 1 qkv0.6 26676 26677 q.6 $input=qkv0.6 $dim=26676 $index=26677 #qkv0.6=(3,36,6,64,32)f32 #q.6=(36,6,64,32)f32 aten::mul pnnx_21089 2 1 q.6 19398 q0.6 #q.6=(36,6,64,32)f32 #q0.6=(36,6,64,32)f32 Tensor.select Tensor.select_963 3 1 qkv0.6 26678 26679 k.6 $input=qkv0.6 $dim=26678 $index=26679 #qkv0.6=(3,36,6,64,32)f32 #k.6=(36,6,64,32)f32 prim::Constant pnnx_21092 0 1 26682 value=-1 prim::ListConstruct pnnx_21093 1 1 26682 19441 Tensor.view Tensor.view_2141 2 1 relative_position_index.6 19441 19442 
$input=relative_position_index.6 $shape=19441 #relative_position_index.6=(64,64)i64 #19442=(4096)i64 prim::ListConstruct pnnx_21095 1 1 19442 19443 #19442=(4096)i64 prim::Constant pnnx_21097 0 1 26683 value=64 prim::Constant pnnx_21098 0 1 26684 value=-1 prim::ListConstruct pnnx_21099 3 1 19409 26683 26684 19445 Tensor.index Tensor.index_430 2 1 relative_position_bias_table.6 19443 19444 $input=relative_position_bias_table.6 $expr=19443 #relative_position_bias_table.6=(225,6)f32 #19444=(4096,6)f32 prim::Constant pnnx_21101 0 1 26685 value=2 prim::Constant pnnx_21102 0 1 26686 value=0 prim::Constant pnnx_21103 0 1 26687 value=1 prim::ListConstruct pnnx_21104 3 1 26685 26686 26687 19447 Tensor.view Tensor.view_2142 2 1 19444 19445 relative_position_bias.6 $input=19444 $shape=19445 #19444=(4096,6)f32 #relative_position_bias.6=(64,64,6)f32 prim::Constant pnnx_21108 0 1 26689 value=0 torch.permute torch.permute_2946 2 1 relative_position_bias.6 19447 19448 $input=relative_position_bias.6 $dims=19447 #relative_position_bias.6=(64,64,6)f32 #19448=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_315 1 1 19448 relative_position_bias0.6 memory_format=torch.contiguous_format $input=19448 #19448=(6,64,64)f32 #relative_position_bias0.6=(6,64,64)f32 prim::Constant pnnx_21110 0 1 26690 value=1 torch.transpose torch.transpose_3207 3 1 k.6 19407 19408 19439 $input=k.6 $dim0=19407 $dim1=19408 #k.6=(36,6,64,32)f32 #19439=(36,6,32,64)f32 torch.matmul torch.matmul_2412 2 1 q0.6 19439 attn.14 $input=q0.6 $other=19439 #q0.6=(36,6,64,32)f32 #19439=(36,6,32,64)f32 #attn.14=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3426 2 1 relative_position_bias0.6 26689 19450 $input=relative_position_bias0.6 $dim=26689 #relative_position_bias0.6=(6,64,64)f32 #19450=(1,6,64,64)f32 aten::add pnnx_21111 3 1 attn.14 19450 26690 input.12 #attn.14=(36,6,64,64)f32 #19450=(1,6,64,64)f32 #input.12=(36,6,64,64)f32 nn.Softmax layers_mmsa.5.residual_group.blocks.2.attn.softmax 1 1 input.12 19452 dim=-1 #input.12=(36,6,64,64)f32 #19452=(36,6,64,64)f32 nn.Dropout layers_mmsa.5.residual_group.blocks.2.attn.attn_drop 1 1 19452 19453 #19452=(36,6,64,64)f32 #19453=(36,6,64,64)f32 Tensor.select Tensor.select_964 3 1 qkv0.6 26680 26681 v.6 $input=qkv0.6 $dim=26680 $index=26681 #qkv0.6=(3,36,6,64,32)f32 #v.6=(36,6,64,32)f32 prim::Constant pnnx_21113 0 1 26691 value=1 prim::Constant pnnx_21114 0 1 26692 value=2 torch.matmul torch.matmul_2413 2 1 19453 v.6 19454 $input=19453 $other=v.6 #19453=(36,6,64,64)f32 #v.6=(36,6,64,32)f32 #19454=(36,6,64,32)f32 prim::ListConstruct pnnx_21116 3 1 19419 19423 19427 19456 torch.transpose torch.transpose_3208 3 1 19454 26691 26692 19455 $input=19454 $dim0=26691 $dim1=26692 #19454=(36,6,64,32)f32 #19455=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_643 2 1 19455 19456 input0.6 $input=19455 $shape=19456 #19455=(36,64,6,32)f32 #input0.6=(36,64,192)f32 nn.Linear layers_mmsa.5.residual_group.blocks.2.attn.proj 1 1 input0.6 19458 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.6=(36,64,192)f32 #19458=(36,64,192)f32 nn.Dropout layers_mmsa.5.residual_group.blocks.2.attn.proj_drop 1 1 19458 19459 #19458=(36,64,192)f32 #19459=(36,64,192)f32 prim::Constant pnnx_21118 0 1 26693 value=-1 prim::Constant pnnx_21119 0 1 26694 value=8 prim::Constant pnnx_21120 0 1 26695 value=8 prim::ListConstruct pnnx_21121 4 1 26693 26694 26695 19368 19460 prim::Constant pnnx_21123 0 1 26696 value=8 prim::Constant pnnx_21124 0 1 26697 value=trunc aten::div pnnx_21125 3 1 H1.1 26696 26697 19462 aten::Int 
pnnx_21126 1 1 19462 19463 prim::Constant pnnx_21127 0 1 26698 value=8 prim::Constant pnnx_21128 0 1 26699 value=trunc aten::div pnnx_21129 3 1 W1.1 26698 26699 19464 aten::Int pnnx_21130 1 1 19464 19465 prim::Constant pnnx_21131 0 1 26700 value=1 prim::Constant pnnx_21132 0 1 26701 value=8 prim::Constant pnnx_21133 0 1 26702 value=8 prim::Constant pnnx_21134 0 1 26703 value=-1 prim::ListConstruct pnnx_21135 6 1 26700 19463 19465 26701 26702 26703 19466 prim::Constant pnnx_21137 0 1 26704 value=0 prim::Constant pnnx_21138 0 1 26705 value=1 prim::Constant pnnx_21139 0 1 26706 value=3 prim::Constant pnnx_21140 0 1 26707 value=2 prim::Constant pnnx_21141 0 1 26708 value=4 prim::Constant pnnx_21142 0 1 26709 value=5 prim::ListConstruct pnnx_21143 6 1 26704 26705 26706 26707 26708 26709 19468 Tensor.view Tensor.view_2143 2 1 19459 19460 windows.6 $input=19459 $shape=19460 #19459=(36,64,192)f32 #windows.6=(36,8,8,192)f32 Tensor.view Tensor.view_2144 2 1 windows.6 19466 x2.6 $input=windows.6 $shape=19466 #windows.6=(36,8,8,192)f32 #x2.6=(1,6,6,8,8,192)f32 prim::Constant pnnx_21147 0 1 26711 value=1 prim::Constant pnnx_21148 0 1 26712 value=-1 prim::ListConstruct pnnx_21149 4 1 26711 1561 1801 26712 19471 torch.permute torch.permute_2947 2 1 x2.6 19468 19469 $input=x2.6 $dims=19468 #x2.6=(1,6,6,8,8,192)f32 #19469=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_316 1 1 19469 19470 memory_format=torch.contiguous_format $input=19469 #19469=(1,6,8,6,8,192)f32 #19470=(1,6,8,6,8,192)f32 aten::mul pnnx_21151 2 1 H1.1 W1.1 19473 aten::Int pnnx_21152 1 1 19473 19474 prim::ListConstruct pnnx_21153 3 1 19363 19474 19367 19475 prim::Constant pnnx_21155 0 1 19477 value=None prim::Constant pnnx_21156 0 1 26713 value=1 Tensor.view Tensor.view_2145 2 1 19470 19471 x3.6 $input=19470 $shape=19471 #19470=(1,6,8,6,8,192)f32 #x3.6=(1,48,48,192)f32 Tensor.view Tensor.view_2146 2 1 x3.6 19475 x4.6 $input=x3.6 $shape=19475 #x3.6=(1,48,48,192)f32 #x4.6=(1,2304,192)f32 aten::add pnnx_21157 3 1 19344 x4.6 26713 input.14 #19344=(1,2304,192)f32 #x4.6=(1,2304,192)f32 #input.14=(1,2304,192)f32 nn.LayerNorm layers_mmsa.5.residual_group.blocks.2.norm2 1 1 input.14 19479 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.14=(1,2304,192)f32 #19479=(1,2304,192)f32 nn.Linear layers_mmsa.5.residual_group.blocks.2.mlp.fc1 1 1 19479 19484 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #19479=(1,2304,192)f32 #19484=(1,2304,384)f32 nn.GELU layers_mmsa.5.residual_group.blocks.2.mlp.act 1 1 19484 19485 #19484=(1,2304,384)f32 #19485=(1,2304,384)f32 nn.Dropout layers_mmsa.5.residual_group.blocks.2.mlp.drop 1 1 19485 19486 #19485=(1,2304,384)f32 #19486=(1,2304,384)f32 nn.Linear layers_mmsa.5.residual_group.blocks.2.mlp.fc2 1 1 19486 19487 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #19486=(1,2304,384)f32 #19487=(1,2304,192)f32 nn.Dropout layers_mmsa.5.residual_group.blocks.2.mlp.drop 1 1 19487 19488 #19487=(1,2304,192)f32 #19488=(1,2304,192)f32 prim::Constant pnnx_21158 0 1 19489 value=None prim::Constant pnnx_21159 0 1 26714 value=1 aten::add pnnx_21160 3 1 input.14 19488 26714 19490 #input.14=(1,2304,192)f32 #19488=(1,2304,192)f32 #19490=(1,2304,192)f32 prim::Constant pnnx_21161 0 1 19491 value=trunc prim::Constant pnnx_21162 0 1 19492 value=8 prim::Constant pnnx_21163 0 1 19493 value=0 prim::Constant pnnx_21164 0 1 19494 value=2 prim::Constant pnnx_21165 0 1 19495 value=-4 prim::Constant pnnx_21166 0 1 19496 value=1 
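The view/permute/contiguous/view chain that turns the (36,8,8,192) attention windows back into a (1,48,48,192) map, the torch.roll by (4,4) on dims (1,2), the residual add, and the norm2 -> fc1 (192->384) -> GELU -> fc2 (384->192) branch match the usual window-reverse and MLP tail of a Swin-style block. A hedged sketch under the same H=W=48, window_size=8 assumptions; the helper names are mine, not from the export.

import torch
import torch.nn as nn

def window_reverse(windows, window_size, H, W):
    # windows: (num_windows*B, ws, ws, C) -> x: (B, H, W, C)
    B = windows.shape[0] // ((H // window_size) * (W // window_size))
    x = windows.view(B, H // window_size, W // window_size,
                     window_size, window_size, -1)
    return x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, H, W, -1)

class MlpSketch(nn.Module):
    def __init__(self, dim=192, hidden=384):
        super().__init__()
        self.fc1, self.act, self.fc2 = nn.Linear(dim, hidden), nn.GELU(), nn.Linear(hidden, dim)

    def forward(self, x):
        return self.fc2(self.act(self.fc1(x)))

# residual wiring as in the dump: x = shortcut + attn_branch; x = x + mlp(norm2(x))
H = W = 48
attn_windows = torch.randn(36, 8, 8, 192)              # stand-in for the proj_drop output
shifted = window_reverse(attn_windows, 8, H, W)        # (1, 48, 48, 192)
x = torch.roll(shifted, shifts=(4, 4), dims=(1, 2))    # undo the (-4, -4) cyclic shift
x = x.view(1, H * W, 192)                              # (1, 2304, 192)
shortcut = torch.randn(1, H * W, 192)                  # the block's input tokens
x = shortcut + x
x = x + MlpSketch()(nn.LayerNorm(192)(x))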
prim::Constant pnnx_21167 0 1 19497 value=3 prim::Constant pnnx_21168 0 1 19498 value=8 prim::Constant pnnx_21169 0 1 19499 value=4 prim::Constant pnnx_21170 0 1 19500 value=5 prim::Constant pnnx_21171 0 1 19501 value=-1 prim::Constant pnnx_21172 0 1 19502 value=64 pnnx.Attribute layers_mmsa.5.residual_group.blocks.3 0 1 attn_mask.4 @attn_mask=(36,64,64)f32 #attn_mask.4=(36,64,64)f32 aten::size pnnx_21173 2 1 19490 19493 19509 #19490=(1,2304,192)f32 prim::NumToTensor pnnx_21174 1 1 19509 B.8 aten::Int pnnx_21175 1 1 B.8 19511 aten::Int pnnx_21176 1 1 B.8 19512 aten::size pnnx_21177 2 1 19490 19494 19513 #19490=(1,2304,192)f32 prim::NumToTensor pnnx_21178 1 1 19513 C.22 aten::Int pnnx_21179 1 1 C.22 19515 aten::Int pnnx_21180 1 1 C.22 19516 aten::Int pnnx_21181 1 1 C.22 19517 aten::Int pnnx_21182 1 1 C.22 19518 nn.LayerNorm layers_mmsa.5.residual_group.blocks.3.norm1 1 1 19490 19519 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #19490=(1,2304,192)f32 #19519=(1,2304,192)f32 prim::ListConstruct pnnx_21183 4 1 19512 1558 1798 19518 19520 prim::Constant pnnx_21185 0 1 26715 value=-4 prim::ListConstruct pnnx_21186 2 1 19495 26715 19522 prim::Constant pnnx_21187 0 1 26716 value=2 prim::ListConstruct pnnx_21188 2 1 19496 26716 19523 Tensor.view Tensor.view_2147 2 1 19519 19520 x.8 $input=19519 $shape=19520 #19519=(1,2304,192)f32 #x.8=(1,48,48,192)f32 prim::Constant pnnx_21190 0 1 26717 value=0 torch.roll torch.roll_2524 3 1 x.8 19522 19523 x0.8 $input=x.8 $shifts=19522 $dims=19523 #x.8=(1,48,48,192)f32 #x0.8=(1,48,48,192)f32 aten::size pnnx_21191 2 1 x0.8 26717 19525 #x0.8=(1,48,48,192)f32 prim::NumToTensor pnnx_21192 1 1 19525 B0.8 aten::Int pnnx_21193 1 1 B0.8 19527 prim::Constant pnnx_21194 0 1 26718 value=1 aten::size pnnx_21195 2 1 x0.8 26718 19528 #x0.8=(1,48,48,192)f32 prim::NumToTensor pnnx_21196 1 1 19528 19529 prim::Constant pnnx_21197 0 1 26719 value=2 aten::size pnnx_21198 2 1 x0.8 26719 19530 #x0.8=(1,48,48,192)f32 prim::NumToTensor pnnx_21199 1 1 19530 19531 aten::size pnnx_21200 2 1 x0.8 19497 19532 #x0.8=(1,48,48,192)f32 prim::NumToTensor pnnx_21201 1 1 19532 C0.8 aten::Int pnnx_21202 1 1 C0.8 19534 aten::Int pnnx_21203 1 1 C0.8 19535 aten::div pnnx_21204 3 1 19529 19492 19491 19536 aten::Int pnnx_21205 1 1 19536 19537 prim::Constant pnnx_21206 0 1 26720 value=8 prim::Constant pnnx_21207 0 1 26721 value=trunc aten::div pnnx_21208 3 1 19531 26720 26721 19538 aten::Int pnnx_21209 1 1 19538 19539 prim::Constant pnnx_21210 0 1 26722 value=8 prim::ListConstruct pnnx_21211 6 1 19527 19537 19498 19539 26722 19535 19540 prim::Constant pnnx_21213 0 1 26723 value=0 prim::Constant pnnx_21214 0 1 26724 value=1 prim::Constant pnnx_21215 0 1 26725 value=3 prim::Constant pnnx_21216 0 1 26726 value=2 prim::ListConstruct pnnx_21217 6 1 26723 26724 26725 26726 19499 19500 19542 Tensor.view Tensor.view_2148 2 1 x0.8 19540 x1.8 $input=x0.8 $shape=19540 #x0.8=(1,48,48,192)f32 #x1.8=(1,6,8,6,8,192)f32 prim::Constant pnnx_21221 0 1 26728 value=8 prim::Constant pnnx_21222 0 1 26729 value=8 prim::ListConstruct pnnx_21223 4 1 19501 26728 26729 19534 19545 torch.permute torch.permute_2948 2 1 x1.8 19542 19543 $input=x1.8 $dims=19542 #x1.8=(1,6,8,6,8,192)f32 #19543=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_317 1 1 19543 19544 memory_format=torch.contiguous_format $input=19543 #19543=(1,6,6,8,8,192)f32 #19544=(1,6,6,8,8,192)f32 prim::Constant pnnx_21225 0 1 26730 value=-1 prim::ListConstruct pnnx_21226 3 1 26730 19502 19517 19547 prim::Constant 
pnnx_21228 0 1 19549 value=1.767767e-01 prim::Constant pnnx_21229 0 1 19550 value=trunc prim::Constant pnnx_21230 0 1 19551 value=6 prim::Constant pnnx_21231 0 1 19552 value=0 prim::Constant pnnx_21232 0 1 19553 value=1 prim::Constant pnnx_21233 0 1 19554 value=2 prim::Constant pnnx_21234 0 1 19555 value=3 prim::Constant pnnx_21235 0 1 19556 value=6 prim::Constant pnnx_21236 0 1 19557 value=4 prim::Constant pnnx_21237 0 1 19558 value=-2 prim::Constant pnnx_21238 0 1 19559 value=-1 prim::Constant pnnx_21239 0 1 19560 value=64 pnnx.Attribute layers_mmsa.5.residual_group.blocks.3.attn 0 1 relative_position_bias_table.8 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.8=(225,6)f32 pnnx.Attribute layers_mmsa.5.residual_group.blocks.3.attn 0 1 relative_position_index.8 @relative_position_index=(64,64)i64 #relative_position_index.8=(64,64)i64 Tensor.view Tensor.view_2149 2 1 19544 19545 x_windows.8 $input=19544 $shape=19545 #19544=(1,6,6,8,8,192)f32 #x_windows.8=(36,8,8,192)f32 Tensor.view Tensor.view_2150 2 1 x_windows.8 19547 x2.8 $input=x_windows.8 $shape=19547 #x_windows.8=(36,8,8,192)f32 #x2.8=(36,64,192)f32 aten::size pnnx_21240 2 1 x2.8 19552 19568 #x2.8=(36,64,192)f32 prim::NumToTensor pnnx_21241 1 1 19568 B_.8 aten::Int pnnx_21242 1 1 B_.8 19570 aten::Int pnnx_21243 1 1 B_.8 19571 aten::size pnnx_21244 2 1 x2.8 19553 19572 #x2.8=(36,64,192)f32 prim::NumToTensor pnnx_21245 1 1 19572 N.8 aten::Int pnnx_21246 1 1 N.8 19574 aten::Int pnnx_21247 1 1 N.8 19575 aten::Int pnnx_21248 1 1 N.8 19576 aten::Int pnnx_21249 1 1 N.8 19577 aten::Int pnnx_21250 1 1 N.8 19578 aten::Int pnnx_21251 1 1 N.8 19579 aten::size pnnx_21252 2 1 x2.8 19554 19580 #x2.8=(36,64,192)f32 prim::NumToTensor pnnx_21253 1 1 19580 C.24 aten::Int pnnx_21254 1 1 C.24 19582 nn.Linear layers_mmsa.5.residual_group.blocks.3.attn.qkv 1 1 x2.8 19583 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.8=(36,64,192)f32 #19583=(36,64,576)f32 aten::div pnnx_21255 3 1 C.24 19551 19550 19584 aten::Int pnnx_21256 1 1 19584 19585 prim::ListConstruct pnnx_21257 5 1 19571 19579 19555 19556 19585 19586 prim::Constant pnnx_21259 0 1 26731 value=2 prim::Constant pnnx_21260 0 1 26732 value=0 prim::Constant pnnx_21261 0 1 26733 value=3 prim::Constant pnnx_21262 0 1 26734 value=1 prim::ListConstruct pnnx_21263 5 1 26731 26732 26733 26734 19557 19588 Tensor.reshape Tensor.reshape_644 2 1 19583 19586 19587 $input=19583 $shape=19586 #19583=(36,64,576)f32 #19587=(36,64,3,6,32)f32 prim::Constant pnnx_21265 0 1 26735 value=0 prim::Constant pnnx_21266 0 1 26736 value=0 prim::Constant pnnx_21268 0 1 26737 value=0 prim::Constant pnnx_21269 0 1 26738 value=1 prim::Constant pnnx_21271 0 1 26739 value=0 prim::Constant pnnx_21272 0 1 26740 value=2 torch.permute torch.permute_2949 2 1 19587 19588 qkv0.8 $input=19587 $dims=19588 #19587=(36,64,3,6,32)f32 #qkv0.8=(3,36,6,64,32)f32 Tensor.select Tensor.select_965 3 1 qkv0.8 26735 26736 q.8 $input=qkv0.8 $dim=26735 $index=26736 #qkv0.8=(3,36,6,64,32)f32 #q.8=(36,6,64,32)f32 aten::mul pnnx_21274 2 1 q.8 19549 q0.8 #q.8=(36,6,64,32)f32 #q0.8=(36,6,64,32)f32 Tensor.select Tensor.select_966 3 1 qkv0.8 26737 26738 k.8 $input=qkv0.8 $dim=26737 $index=26738 #qkv0.8=(3,36,6,64,32)f32 #k.8=(36,6,64,32)f32 prim::Constant pnnx_21277 0 1 26741 value=-1 prim::ListConstruct pnnx_21278 1 1 26741 19596 Tensor.view Tensor.view_2151 2 1 relative_position_index.8 19596 19597 $input=relative_position_index.8 $shape=19596 #relative_position_index.8=(64,64)i64 #19597=(4096)i64 
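Block 3 above is a shifted block: the input is rolled by (-4,-4) on dims (1,2), partitioned into 8x8 windows, and its attention uses the stored attn_mask attribute of shape (36,64,64). The graph only carries the precomputed mask; the sketch below shows one typical way such a mask is built (as in the reference Swin-style code), assuming H=W=48, window_size=8, shift_size=4. It is an assumption about how the attribute was produced, not something the dump itself states.

import torch

def calculate_mask(H=48, W=48, window_size=8, shift_size=4):
    # Builds an additive (num_windows, ws*ws, ws*ws) mask: 0 within a region, -100 across regions.
    img_mask = torch.zeros(1, H, W, 1)
    cnt = 0
    for h in (slice(0, -window_size), slice(-window_size, -shift_size), slice(-shift_size, None)):
        for w in (slice(0, -window_size), slice(-window_size, -shift_size), slice(-shift_size, None)):
            img_mask[:, h, w, :] = cnt
            cnt += 1
    # window partition: (1, H, W, 1) -> (num_windows, ws*ws)
    mask_windows = img_mask.view(1, H // window_size, window_size,
                                 W // window_size, window_size, 1)
    mask_windows = mask_windows.permute(0, 1, 3, 2, 4, 5).reshape(-1, window_size * window_size)
    attn_mask = mask_windows.unsqueeze(1) - mask_windows.unsqueeze(2)
    return attn_mask.masked_fill(attn_mask != 0, -100.0).masked_fill(attn_mask == 0, 0.0)

print(calculate_mask().shape)   # torch.Size([36, 64, 64]) -- same shape as attn_mask.4 above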
prim::ListConstruct pnnx_21280 1 1 19597 19598 #19597=(4096)i64 prim::Constant pnnx_21282 0 1 26742 value=64 prim::Constant pnnx_21283 0 1 26743 value=-1 prim::ListConstruct pnnx_21284 3 1 19560 26742 26743 19600 Tensor.index Tensor.index_431 2 1 relative_position_bias_table.8 19598 19599 $input=relative_position_bias_table.8 $expr=19598 #relative_position_bias_table.8=(225,6)f32 #19599=(4096,6)f32 prim::Constant pnnx_21286 0 1 26744 value=2 prim::Constant pnnx_21287 0 1 26745 value=0 prim::Constant pnnx_21288 0 1 26746 value=1 prim::ListConstruct pnnx_21289 3 1 26744 26745 26746 19602 Tensor.view Tensor.view_2152 2 1 19599 19600 relative_position_bias.8 $input=19599 $shape=19600 #19599=(4096,6)f32 #relative_position_bias.8=(64,64,6)f32 prim::Constant pnnx_21293 0 1 26748 value=0 torch.permute torch.permute_2950 2 1 relative_position_bias.8 19602 19603 $input=relative_position_bias.8 $dims=19602 #relative_position_bias.8=(64,64,6)f32 #19603=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_318 1 1 19603 relative_position_bias0.8 memory_format=torch.contiguous_format $input=19603 #19603=(6,64,64)f32 #relative_position_bias0.8=(6,64,64)f32 prim::Constant pnnx_21295 0 1 26749 value=1 torch.transpose torch.transpose_3209 3 1 k.8 19558 19559 19594 $input=k.8 $dim0=19558 $dim1=19559 #k.8=(36,6,64,32)f32 #19594=(36,6,32,64)f32 torch.matmul torch.matmul_2414 2 1 q0.8 19594 attn.18 $input=q0.8 $other=19594 #q0.8=(36,6,64,32)f32 #19594=(36,6,32,64)f32 #attn.18=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3427 2 1 relative_position_bias0.8 26748 19605 $input=relative_position_bias0.8 $dim=26748 #relative_position_bias0.8=(6,64,64)f32 #19605=(1,6,64,64)f32 aten::add pnnx_21296 3 1 attn.18 19605 26749 attn0.4 #attn.18=(36,6,64,64)f32 #19605=(1,6,64,64)f32 #attn0.4=(36,6,64,64)f32 prim::Constant pnnx_21297 0 1 26750 value=0 aten::size pnnx_21298 2 1 attn_mask.4 26750 19607 #attn_mask.4=(36,64,64)f32 prim::NumToTensor pnnx_21299 1 1 19607 other.4 aten::Int pnnx_21300 1 1 other.4 19609 prim::Constant pnnx_21301 0 1 26751 value=trunc aten::div pnnx_21302 3 1 B_.8 other.4 26751 19610 aten::Int pnnx_21303 1 1 19610 19611 prim::Constant pnnx_21304 0 1 26752 value=6 prim::ListConstruct pnnx_21305 5 1 19611 19609 26752 19578 19577 19612 prim::Constant pnnx_21307 0 1 26753 value=1 prim::Constant pnnx_21309 0 1 26754 value=0 prim::Constant pnnx_21311 0 1 26755 value=1 Tensor.view Tensor.view_2153 2 1 attn0.4 19612 19613 $input=attn0.4 $shape=19612 #attn0.4=(36,6,64,64)f32 #19613=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3428 2 1 attn_mask.4 26753 19614 $input=attn_mask.4 $dim=26753 #attn_mask.4=(36,64,64)f32 #19614=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3429 2 1 19614 26754 19615 $input=19614 $dim=26754 #19614=(36,1,64,64)f32 #19615=(1,36,1,64,64)f32 aten::add pnnx_21312 3 1 19613 19615 26755 attn1.4 #19613=(1,36,6,64,64)f32 #19615=(1,36,1,64,64)f32 #attn1.4=(1,36,6,64,64)f32 prim::Constant pnnx_21313 0 1 26756 value=-1 prim::Constant pnnx_21314 0 1 26757 value=6 prim::ListConstruct pnnx_21315 4 1 26756 26757 19576 19575 19617 Tensor.view Tensor.view_2154 2 1 attn1.4 19617 input.16 $input=attn1.4 $shape=19617 #attn1.4=(1,36,6,64,64)f32 #input.16=(36,6,64,64)f32 nn.Softmax layers_mmsa.5.residual_group.blocks.3.attn.softmax 1 1 input.16 19619 dim=-1 #input.16=(36,6,64,64)f32 #19619=(36,6,64,64)f32 nn.Dropout layers_mmsa.5.residual_group.blocks.3.attn.attn_drop 1 1 19619 19620 #19619=(36,6,64,64)f32 #19620=(36,6,64,64)f32 Tensor.select Tensor.select_967 3 1 qkv0.8 26739 26740 v.8 $input=qkv0.8 
$dim=26739 $index=26740 #qkv0.8=(3,36,6,64,32)f32 #v.8=(36,6,64,32)f32 prim::Constant pnnx_21318 0 1 26758 value=1 prim::Constant pnnx_21319 0 1 26759 value=2 torch.matmul torch.matmul_2415 2 1 19620 v.8 19621 $input=19620 $other=v.8 #19620=(36,6,64,64)f32 #v.8=(36,6,64,32)f32 #19621=(36,6,64,32)f32 prim::ListConstruct pnnx_21321 3 1 19570 19574 19582 19623 torch.transpose torch.transpose_3210 3 1 19621 26758 26759 19622 $input=19621 $dim0=26758 $dim1=26759 #19621=(36,6,64,32)f32 #19622=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_645 2 1 19622 19623 input0.8 $input=19622 $shape=19623 #19622=(36,64,6,32)f32 #input0.8=(36,64,192)f32 nn.Linear layers_mmsa.5.residual_group.blocks.3.attn.proj 1 1 input0.8 19625 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.8=(36,64,192)f32 #19625=(36,64,192)f32 nn.Dropout layers_mmsa.5.residual_group.blocks.3.attn.proj_drop 1 1 19625 19626 #19625=(36,64,192)f32 #19626=(36,64,192)f32 prim::Constant pnnx_21323 0 1 26760 value=-1 prim::Constant pnnx_21324 0 1 26761 value=8 prim::Constant pnnx_21325 0 1 26762 value=8 prim::ListConstruct pnnx_21326 4 1 26760 26761 26762 19516 19627 prim::Constant pnnx_21328 0 1 26763 value=8 prim::Constant pnnx_21329 0 1 26764 value=trunc aten::div pnnx_21330 3 1 H1.1 26763 26764 19629 aten::Int pnnx_21331 1 1 19629 19630 prim::Constant pnnx_21332 0 1 26765 value=8 prim::Constant pnnx_21333 0 1 26766 value=trunc aten::div pnnx_21334 3 1 W1.1 26765 26766 19631 aten::Int pnnx_21335 1 1 19631 19632 prim::Constant pnnx_21336 0 1 26767 value=1 prim::Constant pnnx_21337 0 1 26768 value=8 prim::Constant pnnx_21338 0 1 26769 value=8 prim::Constant pnnx_21339 0 1 26770 value=-1 prim::ListConstruct pnnx_21340 6 1 26767 19630 19632 26768 26769 26770 19633 prim::Constant pnnx_21342 0 1 26771 value=0 prim::Constant pnnx_21343 0 1 26772 value=1 prim::Constant pnnx_21344 0 1 26773 value=3 prim::Constant pnnx_21345 0 1 26774 value=2 prim::Constant pnnx_21346 0 1 26775 value=4 prim::Constant pnnx_21347 0 1 26776 value=5 prim::ListConstruct pnnx_21348 6 1 26771 26772 26773 26774 26775 26776 19635 Tensor.view Tensor.view_2155 2 1 19626 19627 windows.8 $input=19626 $shape=19627 #19626=(36,64,192)f32 #windows.8=(36,8,8,192)f32 Tensor.view Tensor.view_2156 2 1 windows.8 19633 x3.8 $input=windows.8 $shape=19633 #windows.8=(36,8,8,192)f32 #x3.8=(1,6,6,8,8,192)f32 prim::Constant pnnx_21352 0 1 26778 value=1 prim::Constant pnnx_21353 0 1 26779 value=-1 prim::ListConstruct pnnx_21354 4 1 26778 1555 1795 26779 19638 torch.permute torch.permute_2951 2 1 x3.8 19635 19636 $input=x3.8 $dims=19635 #x3.8=(1,6,6,8,8,192)f32 #19636=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_319 1 1 19636 19637 memory_format=torch.contiguous_format $input=19636 #19636=(1,6,8,6,8,192)f32 #19637=(1,6,8,6,8,192)f32 prim::Constant pnnx_21356 0 1 26780 value=4 prim::Constant pnnx_21357 0 1 26781 value=4 prim::ListConstruct pnnx_21358 2 1 26780 26781 19640 prim::Constant pnnx_21359 0 1 26782 value=1 prim::Constant pnnx_21360 0 1 26783 value=2 prim::ListConstruct pnnx_21361 2 1 26782 26783 19641 Tensor.view Tensor.view_2157 2 1 19637 19638 shifted_x.4 $input=19637 $shape=19638 #19637=(1,6,8,6,8,192)f32 #shifted_x.4=(1,48,48,192)f32 aten::mul pnnx_21363 2 1 H1.1 W1.1 19643 aten::Int pnnx_21364 1 1 19643 19644 prim::ListConstruct pnnx_21365 3 1 19511 19644 19515 19645 prim::Constant pnnx_21367 0 1 19647 value=None prim::Constant pnnx_21368 0 1 26784 value=1 torch.roll torch.roll_2525 3 1 shifted_x.4 19640 19641 x4.8 $input=shifted_x.4 
$shifts=19640 $dims=19641 #shifted_x.4=(1,48,48,192)f32 #x4.8=(1,48,48,192)f32 Tensor.view Tensor.view_2158 2 1 x4.8 19645 x5.4 $input=x4.8 $shape=19645 #x4.8=(1,48,48,192)f32 #x5.4=(1,2304,192)f32 aten::add pnnx_21369 3 1 19490 x5.4 26784 input.18 #19490=(1,2304,192)f32 #x5.4=(1,2304,192)f32 #input.18=(1,2304,192)f32 nn.LayerNorm layers_mmsa.5.residual_group.blocks.3.norm2 1 1 input.18 19649 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.18=(1,2304,192)f32 #19649=(1,2304,192)f32 nn.Linear layers_mmsa.5.residual_group.blocks.3.mlp.fc1 1 1 19649 19654 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #19649=(1,2304,192)f32 #19654=(1,2304,384)f32 nn.GELU layers_mmsa.5.residual_group.blocks.3.mlp.act 1 1 19654 19655 #19654=(1,2304,384)f32 #19655=(1,2304,384)f32 nn.Dropout layers_mmsa.5.residual_group.blocks.3.mlp.drop 1 1 19655 19656 #19655=(1,2304,384)f32 #19656=(1,2304,384)f32 nn.Linear layers_mmsa.5.residual_group.blocks.3.mlp.fc2 1 1 19656 19657 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #19656=(1,2304,384)f32 #19657=(1,2304,192)f32 nn.Dropout layers_mmsa.5.residual_group.blocks.3.mlp.drop 1 1 19657 19658 #19657=(1,2304,192)f32 #19658=(1,2304,192)f32 prim::Constant pnnx_21370 0 1 19659 value=None prim::Constant pnnx_21371 0 1 26785 value=1 aten::add pnnx_21372 3 1 input.18 19658 26785 19660 #input.18=(1,2304,192)f32 #19658=(1,2304,192)f32 #19660=(1,2304,192)f32 prim::Constant pnnx_21373 0 1 19661 value=trunc prim::Constant pnnx_21374 0 1 19662 value=8 prim::Constant pnnx_21375 0 1 19663 value=0 prim::Constant pnnx_21376 0 1 19664 value=2 prim::Constant pnnx_21377 0 1 19665 value=1 prim::Constant pnnx_21378 0 1 19666 value=3 prim::Constant pnnx_21379 0 1 19667 value=8 prim::Constant pnnx_21380 0 1 19668 value=4 prim::Constant pnnx_21381 0 1 19669 value=5 prim::Constant pnnx_21382 0 1 19670 value=-1 prim::Constant pnnx_21383 0 1 19671 value=64 aten::size pnnx_21384 2 1 19660 19663 19677 #19660=(1,2304,192)f32 prim::NumToTensor pnnx_21385 1 1 19677 B.10 aten::Int pnnx_21386 1 1 B.10 19679 aten::Int pnnx_21387 1 1 B.10 19680 aten::size pnnx_21388 2 1 19660 19664 19681 #19660=(1,2304,192)f32 prim::NumToTensor pnnx_21389 1 1 19681 C.26 aten::Int pnnx_21390 1 1 C.26 19683 aten::Int pnnx_21391 1 1 C.26 19684 aten::Int pnnx_21392 1 1 C.26 19685 aten::Int pnnx_21393 1 1 C.26 19686 nn.LayerNorm layers_mmsa.5.residual_group.blocks.4.norm1 1 1 19660 19687 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #19660=(1,2304,192)f32 #19687=(1,2304,192)f32 prim::ListConstruct pnnx_21394 4 1 19680 1552 1792 19686 19688 prim::Constant pnnx_21396 0 1 26786 value=0 Tensor.view Tensor.view_2159 2 1 19687 19688 x.10 $input=19687 $shape=19688 #19687=(1,2304,192)f32 #x.10=(1,48,48,192)f32 aten::size pnnx_21397 2 1 x.10 26786 19690 #x.10=(1,48,48,192)f32 prim::NumToTensor pnnx_21398 1 1 19690 B0.10 aten::Int pnnx_21399 1 1 B0.10 19692 aten::size pnnx_21400 2 1 x.10 19665 19693 #x.10=(1,48,48,192)f32 prim::NumToTensor pnnx_21401 1 1 19693 19694 prim::Constant pnnx_21402 0 1 26787 value=2 aten::size pnnx_21403 2 1 x.10 26787 19695 #x.10=(1,48,48,192)f32 prim::NumToTensor pnnx_21404 1 1 19695 19696 aten::size pnnx_21405 2 1 x.10 19666 19697 #x.10=(1,48,48,192)f32 prim::NumToTensor pnnx_21406 1 1 19697 C0.10 aten::Int pnnx_21407 1 1 C0.10 19699 aten::Int pnnx_21408 1 1 C0.10 19700 aten::div pnnx_21409 3 1 19694 19662 19661 19701 aten::Int pnnx_21410 1 1 
19701 19702 prim::Constant pnnx_21411 0 1 26788 value=8 prim::Constant pnnx_21412 0 1 26789 value=trunc aten::div pnnx_21413 3 1 19696 26788 26789 19703 aten::Int pnnx_21414 1 1 19703 19704 prim::Constant pnnx_21415 0 1 26790 value=8 prim::ListConstruct pnnx_21416 6 1 19692 19702 19667 19704 26790 19700 19705 prim::Constant pnnx_21418 0 1 26791 value=0 prim::Constant pnnx_21419 0 1 26792 value=1 prim::Constant pnnx_21420 0 1 26793 value=3 prim::Constant pnnx_21421 0 1 26794 value=2 prim::ListConstruct pnnx_21422 6 1 26791 26792 26793 26794 19668 19669 19707 Tensor.view Tensor.view_2160 2 1 x.10 19705 x0.10 $input=x.10 $shape=19705 #x.10=(1,48,48,192)f32 #x0.10=(1,6,8,6,8,192)f32 prim::Constant pnnx_21426 0 1 26796 value=8 prim::Constant pnnx_21427 0 1 26797 value=8 prim::ListConstruct pnnx_21428 4 1 19670 26796 26797 19699 19710 torch.permute torch.permute_2952 2 1 x0.10 19707 19708 $input=x0.10 $dims=19707 #x0.10=(1,6,8,6,8,192)f32 #19708=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_320 1 1 19708 19709 memory_format=torch.contiguous_format $input=19708 #19708=(1,6,6,8,8,192)f32 #19709=(1,6,6,8,8,192)f32 prim::Constant pnnx_21430 0 1 26798 value=-1 prim::ListConstruct pnnx_21431 3 1 26798 19671 19685 19712 prim::Constant pnnx_21433 0 1 19714 value=1.767767e-01 prim::Constant pnnx_21434 0 1 19715 value=trunc prim::Constant pnnx_21435 0 1 19716 value=6 prim::Constant pnnx_21436 0 1 19717 value=0 prim::Constant pnnx_21437 0 1 19718 value=1 prim::Constant pnnx_21438 0 1 19719 value=2 prim::Constant pnnx_21439 0 1 19720 value=3 prim::Constant pnnx_21440 0 1 19721 value=6 prim::Constant pnnx_21441 0 1 19722 value=4 prim::Constant pnnx_21442 0 1 19723 value=-2 prim::Constant pnnx_21443 0 1 19724 value=-1 prim::Constant pnnx_21444 0 1 19725 value=64 pnnx.Attribute layers_mmsa.5.residual_group.blocks.4.attn 0 1 relative_position_bias_table.10 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.10=(225,6)f32 pnnx.Attribute layers_mmsa.5.residual_group.blocks.4.attn 0 1 relative_position_index.10 @relative_position_index=(64,64)i64 #relative_position_index.10=(64,64)i64 Tensor.view Tensor.view_2161 2 1 19709 19710 x_windows.10 $input=19709 $shape=19710 #19709=(1,6,6,8,8,192)f32 #x_windows.10=(36,8,8,192)f32 Tensor.view Tensor.view_2162 2 1 x_windows.10 19712 x1.10 $input=x_windows.10 $shape=19712 #x_windows.10=(36,8,8,192)f32 #x1.10=(36,64,192)f32 aten::size pnnx_21445 2 1 x1.10 19717 19733 #x1.10=(36,64,192)f32 prim::NumToTensor pnnx_21446 1 1 19733 B_.10 aten::Int pnnx_21447 1 1 B_.10 19735 aten::Int pnnx_21448 1 1 B_.10 19736 aten::size pnnx_21449 2 1 x1.10 19718 19737 #x1.10=(36,64,192)f32 prim::NumToTensor pnnx_21450 1 1 19737 N.10 aten::Int pnnx_21451 1 1 N.10 19739 aten::Int pnnx_21452 1 1 N.10 19740 aten::size pnnx_21453 2 1 x1.10 19719 19741 #x1.10=(36,64,192)f32 prim::NumToTensor pnnx_21454 1 1 19741 C.28 aten::Int pnnx_21455 1 1 C.28 19743 nn.Linear layers_mmsa.5.residual_group.blocks.4.attn.qkv 1 1 x1.10 19744 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x1.10=(36,64,192)f32 #19744=(36,64,576)f32 aten::div pnnx_21456 3 1 C.28 19716 19715 19745 aten::Int pnnx_21457 1 1 19745 19746 prim::ListConstruct pnnx_21458 5 1 19736 19740 19720 19721 19746 19747 prim::Constant pnnx_21460 0 1 26799 value=2 prim::Constant pnnx_21461 0 1 26800 value=0 prim::Constant pnnx_21462 0 1 26801 value=3 prim::Constant pnnx_21463 0 1 26802 value=1 prim::ListConstruct pnnx_21464 5 1 26799 26800 26801 26802 19722 19749 Tensor.reshape Tensor.reshape_646 
2 1 19744 19747 19748 $input=19744 $shape=19747 #19744=(36,64,576)f32 #19748=(36,64,3,6,32)f32 prim::Constant pnnx_21466 0 1 26803 value=0 prim::Constant pnnx_21467 0 1 26804 value=0 prim::Constant pnnx_21469 0 1 26805 value=0 prim::Constant pnnx_21470 0 1 26806 value=1 prim::Constant pnnx_21472 0 1 26807 value=0 prim::Constant pnnx_21473 0 1 26808 value=2 torch.permute torch.permute_2953 2 1 19748 19749 qkv0.10 $input=19748 $dims=19749 #19748=(36,64,3,6,32)f32 #qkv0.10=(3,36,6,64,32)f32 Tensor.select Tensor.select_968 3 1 qkv0.10 26803 26804 q.10 $input=qkv0.10 $dim=26803 $index=26804 #qkv0.10=(3,36,6,64,32)f32 #q.10=(36,6,64,32)f32 aten::mul pnnx_21475 2 1 q.10 19714 q0.10 #q.10=(36,6,64,32)f32 #q0.10=(36,6,64,32)f32 Tensor.select Tensor.select_969 3 1 qkv0.10 26805 26806 k.10 $input=qkv0.10 $dim=26805 $index=26806 #qkv0.10=(3,36,6,64,32)f32 #k.10=(36,6,64,32)f32 prim::Constant pnnx_21478 0 1 26809 value=-1 prim::ListConstruct pnnx_21479 1 1 26809 19757 Tensor.view Tensor.view_2163 2 1 relative_position_index.10 19757 19758 $input=relative_position_index.10 $shape=19757 #relative_position_index.10=(64,64)i64 #19758=(4096)i64 prim::ListConstruct pnnx_21481 1 1 19758 19759 #19758=(4096)i64 prim::Constant pnnx_21483 0 1 26810 value=64 prim::Constant pnnx_21484 0 1 26811 value=-1 prim::ListConstruct pnnx_21485 3 1 19725 26810 26811 19761 Tensor.index Tensor.index_432 2 1 relative_position_bias_table.10 19759 19760 $input=relative_position_bias_table.10 $expr=19759 #relative_position_bias_table.10=(225,6)f32 #19760=(4096,6)f32 prim::Constant pnnx_21487 0 1 26812 value=2 prim::Constant pnnx_21488 0 1 26813 value=0 prim::Constant pnnx_21489 0 1 26814 value=1 prim::ListConstruct pnnx_21490 3 1 26812 26813 26814 19763 Tensor.view Tensor.view_2164 2 1 19760 19761 relative_position_bias.10 $input=19760 $shape=19761 #19760=(4096,6)f32 #relative_position_bias.10=(64,64,6)f32 prim::Constant pnnx_21494 0 1 26816 value=0 torch.permute torch.permute_2954 2 1 relative_position_bias.10 19763 19764 $input=relative_position_bias.10 $dims=19763 #relative_position_bias.10=(64,64,6)f32 #19764=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_321 1 1 19764 relative_position_bias0.10 memory_format=torch.contiguous_format $input=19764 #19764=(6,64,64)f32 #relative_position_bias0.10=(6,64,64)f32 prim::Constant pnnx_21496 0 1 26817 value=1 torch.transpose torch.transpose_3211 3 1 k.10 19723 19724 19755 $input=k.10 $dim0=19723 $dim1=19724 #k.10=(36,6,64,32)f32 #19755=(36,6,32,64)f32 torch.matmul torch.matmul_2416 2 1 q0.10 19755 attn.22 $input=q0.10 $other=19755 #q0.10=(36,6,64,32)f32 #19755=(36,6,32,64)f32 #attn.22=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3430 2 1 relative_position_bias0.10 26816 19766 $input=relative_position_bias0.10 $dim=26816 #relative_position_bias0.10=(6,64,64)f32 #19766=(1,6,64,64)f32 aten::add pnnx_21497 3 1 attn.22 19766 26817 input.20 #attn.22=(36,6,64,64)f32 #19766=(1,6,64,64)f32 #input.20=(36,6,64,64)f32 nn.Softmax layers_mmsa.5.residual_group.blocks.4.attn.softmax 1 1 input.20 19768 dim=-1 #input.20=(36,6,64,64)f32 #19768=(36,6,64,64)f32 nn.Dropout layers_mmsa.5.residual_group.blocks.4.attn.attn_drop 1 1 19768 19769 #19768=(36,6,64,64)f32 #19769=(36,6,64,64)f32 Tensor.select Tensor.select_970 3 1 qkv0.10 26807 26808 v.10 $input=qkv0.10 $dim=26807 $index=26808 #qkv0.10=(3,36,6,64,32)f32 #v.10=(36,6,64,32)f32 prim::Constant pnnx_21499 0 1 26818 value=1 prim::Constant pnnx_21500 0 1 26819 value=2 torch.matmul torch.matmul_2417 2 1 19769 v.10 19770 $input=19769 $other=v.10 
#19769=(36,6,64,64)f32 #v.10=(36,6,64,32)f32 #19770=(36,6,64,32)f32 prim::ListConstruct pnnx_21502 3 1 19735 19739 19743 19772 torch.transpose torch.transpose_3212 3 1 19770 26818 26819 19771 $input=19770 $dim0=26818 $dim1=26819 #19770=(36,6,64,32)f32 #19771=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_647 2 1 19771 19772 input0.10 $input=19771 $shape=19772 #19771=(36,64,6,32)f32 #input0.10=(36,64,192)f32 nn.Linear layers_mmsa.5.residual_group.blocks.4.attn.proj 1 1 input0.10 19774 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.10=(36,64,192)f32 #19774=(36,64,192)f32 nn.Dropout layers_mmsa.5.residual_group.blocks.4.attn.proj_drop 1 1 19774 19775 #19774=(36,64,192)f32 #19775=(36,64,192)f32 prim::Constant pnnx_21504 0 1 26820 value=-1 prim::Constant pnnx_21505 0 1 26821 value=8 prim::Constant pnnx_21506 0 1 26822 value=8 prim::ListConstruct pnnx_21507 4 1 26820 26821 26822 19684 19776 prim::Constant pnnx_21509 0 1 26823 value=8 prim::Constant pnnx_21510 0 1 26824 value=trunc aten::div pnnx_21511 3 1 H1.1 26823 26824 19778 aten::Int pnnx_21512 1 1 19778 19779 prim::Constant pnnx_21513 0 1 26825 value=8 prim::Constant pnnx_21514 0 1 26826 value=trunc aten::div pnnx_21515 3 1 W1.1 26825 26826 19780 aten::Int pnnx_21516 1 1 19780 19781 prim::Constant pnnx_21517 0 1 26827 value=1 prim::Constant pnnx_21518 0 1 26828 value=8 prim::Constant pnnx_21519 0 1 26829 value=8 prim::Constant pnnx_21520 0 1 26830 value=-1 prim::ListConstruct pnnx_21521 6 1 26827 19779 19781 26828 26829 26830 19782 prim::Constant pnnx_21523 0 1 26831 value=0 prim::Constant pnnx_21524 0 1 26832 value=1 prim::Constant pnnx_21525 0 1 26833 value=3 prim::Constant pnnx_21526 0 1 26834 value=2 prim::Constant pnnx_21527 0 1 26835 value=4 prim::Constant pnnx_21528 0 1 26836 value=5 prim::ListConstruct pnnx_21529 6 1 26831 26832 26833 26834 26835 26836 19784 Tensor.view Tensor.view_2165 2 1 19775 19776 windows.10 $input=19775 $shape=19776 #19775=(36,64,192)f32 #windows.10=(36,8,8,192)f32 Tensor.view Tensor.view_2166 2 1 windows.10 19782 x2.10 $input=windows.10 $shape=19782 #windows.10=(36,8,8,192)f32 #x2.10=(1,6,6,8,8,192)f32 prim::Constant pnnx_21533 0 1 26838 value=1 prim::Constant pnnx_21534 0 1 26839 value=-1 prim::ListConstruct pnnx_21535 4 1 26838 1549 1789 26839 19787 torch.permute torch.permute_2955 2 1 x2.10 19784 19785 $input=x2.10 $dims=19784 #x2.10=(1,6,6,8,8,192)f32 #19785=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_322 1 1 19785 19786 memory_format=torch.contiguous_format $input=19785 #19785=(1,6,8,6,8,192)f32 #19786=(1,6,8,6,8,192)f32 aten::mul pnnx_21537 2 1 H1.1 W1.1 19789 aten::Int pnnx_21538 1 1 19789 19790 prim::ListConstruct pnnx_21539 3 1 19679 19790 19683 19791 prim::Constant pnnx_21541 0 1 19793 value=None prim::Constant pnnx_21542 0 1 26840 value=1 Tensor.view Tensor.view_2167 2 1 19786 19787 x3.10 $input=19786 $shape=19787 #19786=(1,6,8,6,8,192)f32 #x3.10=(1,48,48,192)f32 Tensor.view Tensor.view_2168 2 1 x3.10 19791 x4.10 $input=x3.10 $shape=19791 #x3.10=(1,48,48,192)f32 #x4.10=(1,2304,192)f32 aten::add pnnx_21543 3 1 19660 x4.10 26840 input.22 #19660=(1,2304,192)f32 #x4.10=(1,2304,192)f32 #input.22=(1,2304,192)f32 nn.LayerNorm layers_mmsa.5.residual_group.blocks.4.norm2 1 1 input.22 19795 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.22=(1,2304,192)f32 #19795=(1,2304,192)f32 nn.Linear layers_mmsa.5.residual_group.blocks.4.mlp.fc1 1 1 19795 19800 bias=True in_features=192 out_features=384 
@bias=(384)f32 @weight=(384,192)f32 #19795=(1,2304,192)f32 #19800=(1,2304,384)f32 nn.GELU layers_mmsa.5.residual_group.blocks.4.mlp.act 1 1 19800 19801 #19800=(1,2304,384)f32 #19801=(1,2304,384)f32 nn.Dropout layers_mmsa.5.residual_group.blocks.4.mlp.drop 1 1 19801 19802 #19801=(1,2304,384)f32 #19802=(1,2304,384)f32 nn.Linear layers_mmsa.5.residual_group.blocks.4.mlp.fc2 1 1 19802 19803 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #19802=(1,2304,384)f32 #19803=(1,2304,192)f32 nn.Dropout layers_mmsa.5.residual_group.blocks.4.mlp.drop 1 1 19803 19804 #19803=(1,2304,192)f32 #19804=(1,2304,192)f32 prim::Constant pnnx_21544 0 1 19805 value=None prim::Constant pnnx_21545 0 1 26841 value=1 aten::add pnnx_21546 3 1 input.22 19804 26841 19806 #input.22=(1,2304,192)f32 #19804=(1,2304,192)f32 #19806=(1,2304,192)f32 prim::Constant pnnx_21547 0 1 19807 value=trunc prim::Constant pnnx_21548 0 1 19808 value=8 prim::Constant pnnx_21549 0 1 19809 value=0 prim::Constant pnnx_21550 0 1 19810 value=2 prim::Constant pnnx_21551 0 1 19811 value=-4 prim::Constant pnnx_21552 0 1 19812 value=1 prim::Constant pnnx_21553 0 1 19813 value=3 prim::Constant pnnx_21554 0 1 19814 value=8 prim::Constant pnnx_21555 0 1 19815 value=4 prim::Constant pnnx_21556 0 1 19816 value=5 prim::Constant pnnx_21557 0 1 19817 value=-1 prim::Constant pnnx_21558 0 1 19818 value=64 pnnx.Attribute layers_mmsa.5.residual_group.blocks.5 0 1 attn_mask.1 @attn_mask=(36,64,64)f32 #attn_mask.1=(36,64,64)f32 aten::size pnnx_21559 2 1 19806 19809 19825 #19806=(1,2304,192)f32 prim::NumToTensor pnnx_21560 1 1 19825 B.3 aten::Int pnnx_21561 1 1 B.3 19827 aten::Int pnnx_21562 1 1 B.3 19828 aten::size pnnx_21563 2 1 19806 19810 19829 #19806=(1,2304,192)f32 prim::NumToTensor pnnx_21564 1 1 19829 C.9 aten::Int pnnx_21565 1 1 C.9 19831 aten::Int pnnx_21566 1 1 C.9 19832 aten::Int pnnx_21567 1 1 C.9 19833 aten::Int pnnx_21568 1 1 C.9 19834 nn.LayerNorm layers_mmsa.5.residual_group.blocks.5.norm1 1 1 19806 19835 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #19806=(1,2304,192)f32 #19835=(1,2304,192)f32 prim::ListConstruct pnnx_21569 4 1 19828 1546 1786 19834 19836 prim::Constant pnnx_21571 0 1 26842 value=-4 prim::ListConstruct pnnx_21572 2 1 19811 26842 19838 prim::Constant pnnx_21573 0 1 26843 value=2 prim::ListConstruct pnnx_21574 2 1 19812 26843 19839 Tensor.view Tensor.view_2169 2 1 19835 19836 x.1 $input=19835 $shape=19836 #19835=(1,2304,192)f32 #x.1=(1,48,48,192)f32 prim::Constant pnnx_21576 0 1 26844 value=0 torch.roll torch.roll_2526 3 1 x.1 19838 19839 x0.1 $input=x.1 $shifts=19838 $dims=19839 #x.1=(1,48,48,192)f32 #x0.1=(1,48,48,192)f32 aten::size pnnx_21577 2 1 x0.1 26844 19841 #x0.1=(1,48,48,192)f32 prim::NumToTensor pnnx_21578 1 1 19841 B0.1 aten::Int pnnx_21579 1 1 B0.1 19843 prim::Constant pnnx_21580 0 1 26845 value=1 aten::size pnnx_21581 2 1 x0.1 26845 19844 #x0.1=(1,48,48,192)f32 prim::NumToTensor pnnx_21582 1 1 19844 19845 prim::Constant pnnx_21583 0 1 26846 value=2 aten::size pnnx_21584 2 1 x0.1 26846 19846 #x0.1=(1,48,48,192)f32 prim::NumToTensor pnnx_21585 1 1 19846 19847 aten::size pnnx_21586 2 1 x0.1 19813 19848 #x0.1=(1,48,48,192)f32 prim::NumToTensor pnnx_21587 1 1 19848 C0.1 aten::Int pnnx_21588 1 1 C0.1 19850 aten::Int pnnx_21589 1 1 C0.1 19851 aten::div pnnx_21590 3 1 19845 19808 19807 19852 aten::Int pnnx_21591 1 1 19852 19853 prim::Constant pnnx_21592 0 1 26847 value=8 prim::Constant pnnx_21593 0 1 26848 value=trunc aten::div pnnx_21594 3 1 19847 
26847 26848 19854 aten::Int pnnx_21595 1 1 19854 19855 prim::Constant pnnx_21596 0 1 26849 value=8 prim::ListConstruct pnnx_21597 6 1 19843 19853 19814 19855 26849 19851 19856 prim::Constant pnnx_21599 0 1 26850 value=0 prim::Constant pnnx_21600 0 1 26851 value=1 prim::Constant pnnx_21601 0 1 26852 value=3 prim::Constant pnnx_21602 0 1 26853 value=2 prim::ListConstruct pnnx_21603 6 1 26850 26851 26852 26853 19815 19816 19858 Tensor.view Tensor.view_2170 2 1 x0.1 19856 x1.1 $input=x0.1 $shape=19856 #x0.1=(1,48,48,192)f32 #x1.1=(1,6,8,6,8,192)f32 prim::Constant pnnx_21607 0 1 26855 value=8 prim::Constant pnnx_21608 0 1 26856 value=8 prim::ListConstruct pnnx_21609 4 1 19817 26855 26856 19850 19861 torch.permute torch.permute_2956 2 1 x1.1 19858 19859 $input=x1.1 $dims=19858 #x1.1=(1,6,8,6,8,192)f32 #19859=(1,6,6,8,8,192)f32 Tensor.contiguous Tensor.contiguous_323 1 1 19859 19860 memory_format=torch.contiguous_format $input=19859 #19859=(1,6,6,8,8,192)f32 #19860=(1,6,6,8,8,192)f32 prim::Constant pnnx_21611 0 1 26857 value=-1 prim::ListConstruct pnnx_21612 3 1 26857 19818 19833 19863 prim::Constant pnnx_21614 0 1 19865 value=1.767767e-01 prim::Constant pnnx_21615 0 1 19866 value=trunc prim::Constant pnnx_21616 0 1 19867 value=6 prim::Constant pnnx_21617 0 1 19868 value=0 prim::Constant pnnx_21618 0 1 19869 value=1 prim::Constant pnnx_21619 0 1 19870 value=2 prim::Constant pnnx_21620 0 1 19871 value=3 prim::Constant pnnx_21621 0 1 19872 value=6 prim::Constant pnnx_21622 0 1 19873 value=4 prim::Constant pnnx_21623 0 1 19874 value=-2 prim::Constant pnnx_21624 0 1 19875 value=-1 prim::Constant pnnx_21625 0 1 19876 value=64 pnnx.Attribute layers_mmsa.5.residual_group.blocks.5.attn 0 1 relative_position_bias_table.1 @relative_position_bias_table=(225,6)f32 #relative_position_bias_table.1=(225,6)f32 pnnx.Attribute layers_mmsa.5.residual_group.blocks.5.attn 0 1 relative_position_index.1 @relative_position_index=(64,64)i64 #relative_position_index.1=(64,64)i64 Tensor.view Tensor.view_2171 2 1 19860 19861 x_windows.1 $input=19860 $shape=19861 #19860=(1,6,6,8,8,192)f32 #x_windows.1=(36,8,8,192)f32 Tensor.view Tensor.view_2172 2 1 x_windows.1 19863 x2.1 $input=x_windows.1 $shape=19863 #x_windows.1=(36,8,8,192)f32 #x2.1=(36,64,192)f32 aten::size pnnx_21626 2 1 x2.1 19868 19884 #x2.1=(36,64,192)f32 prim::NumToTensor pnnx_21627 1 1 19884 B_.1 aten::Int pnnx_21628 1 1 B_.1 19886 aten::Int pnnx_21629 1 1 B_.1 19887 aten::size pnnx_21630 2 1 x2.1 19869 19888 #x2.1=(36,64,192)f32 prim::NumToTensor pnnx_21631 1 1 19888 N.1 aten::Int pnnx_21632 1 1 N.1 19890 aten::Int pnnx_21633 1 1 N.1 19891 aten::Int pnnx_21634 1 1 N.1 19892 aten::Int pnnx_21635 1 1 N.1 19893 aten::Int pnnx_21636 1 1 N.1 19894 aten::Int pnnx_21637 1 1 N.1 19895 aten::size pnnx_21638 2 1 x2.1 19870 19896 #x2.1=(36,64,192)f32 prim::NumToTensor pnnx_21639 1 1 19896 C.1 aten::Int pnnx_21640 1 1 C.1 19898 nn.Linear layers_mmsa.5.residual_group.blocks.5.attn.qkv 1 1 x2.1 19899 bias=True in_features=192 out_features=576 @bias=(576)f32 @weight=(576,192)f32 #x2.1=(36,64,192)f32 #19899=(36,64,576)f32 aten::div pnnx_21641 3 1 C.1 19867 19866 19900 aten::Int pnnx_21642 1 1 19900 19901 prim::ListConstruct pnnx_21643 5 1 19887 19895 19871 19872 19901 19902 prim::Constant pnnx_21645 0 1 26858 value=2 prim::Constant pnnx_21646 0 1 26859 value=0 prim::Constant pnnx_21647 0 1 26860 value=3 prim::Constant pnnx_21648 0 1 26861 value=1 prim::ListConstruct pnnx_21649 5 1 26858 26859 26860 26861 19873 19904 Tensor.reshape Tensor.reshape_648 2 1 19899 19902 19903 
$input=19899 $shape=19902 #19899=(36,64,576)f32 #19903=(36,64,3,6,32)f32 prim::Constant pnnx_21651 0 1 26862 value=0 prim::Constant pnnx_21652 0 1 26863 value=0 prim::Constant pnnx_21654 0 1 26864 value=0 prim::Constant pnnx_21655 0 1 26865 value=1 prim::Constant pnnx_21657 0 1 26866 value=0 prim::Constant pnnx_21658 0 1 26867 value=2 torch.permute torch.permute_2957 2 1 19903 19904 qkv0.1 $input=19903 $dims=19904 #19903=(36,64,3,6,32)f32 #qkv0.1=(3,36,6,64,32)f32 Tensor.select Tensor.select_971 3 1 qkv0.1 26862 26863 q.1 $input=qkv0.1 $dim=26862 $index=26863 #qkv0.1=(3,36,6,64,32)f32 #q.1=(36,6,64,32)f32 aten::mul pnnx_21660 2 1 q.1 19865 q0.1 #q.1=(36,6,64,32)f32 #q0.1=(36,6,64,32)f32 Tensor.select Tensor.select_972 3 1 qkv0.1 26864 26865 k.1 $input=qkv0.1 $dim=26864 $index=26865 #qkv0.1=(3,36,6,64,32)f32 #k.1=(36,6,64,32)f32 prim::Constant pnnx_21663 0 1 26868 value=-1 prim::ListConstruct pnnx_21664 1 1 26868 19912 Tensor.view Tensor.view_2173 2 1 relative_position_index.1 19912 19913 $input=relative_position_index.1 $shape=19912 #relative_position_index.1=(64,64)i64 #19913=(4096)i64 prim::ListConstruct pnnx_21666 1 1 19913 19914 #19913=(4096)i64 prim::Constant pnnx_21668 0 1 26869 value=64 prim::Constant pnnx_21669 0 1 26870 value=-1 prim::ListConstruct pnnx_21670 3 1 19876 26869 26870 19916 Tensor.index Tensor.index_433 2 1 relative_position_bias_table.1 19914 19915 $input=relative_position_bias_table.1 $expr=19914 #relative_position_bias_table.1=(225,6)f32 #19915=(4096,6)f32 prim::Constant pnnx_21672 0 1 26871 value=2 prim::Constant pnnx_21673 0 1 26872 value=0 prim::Constant pnnx_21674 0 1 26873 value=1 prim::ListConstruct pnnx_21675 3 1 26871 26872 26873 19918 Tensor.view Tensor.view_2174 2 1 19915 19916 relative_position_bias.1 $input=19915 $shape=19916 #19915=(4096,6)f32 #relative_position_bias.1=(64,64,6)f32 prim::Constant pnnx_21679 0 1 26875 value=0 torch.permute torch.permute_2958 2 1 relative_position_bias.1 19918 19919 $input=relative_position_bias.1 $dims=19918 #relative_position_bias.1=(64,64,6)f32 #19919=(6,64,64)f32 Tensor.contiguous Tensor.contiguous_324 1 1 19919 relative_position_bias0.1 memory_format=torch.contiguous_format $input=19919 #19919=(6,64,64)f32 #relative_position_bias0.1=(6,64,64)f32 prim::Constant pnnx_21681 0 1 26876 value=1 torch.transpose torch.transpose_3213 3 1 k.1 19874 19875 19910 $input=k.1 $dim0=19874 $dim1=19875 #k.1=(36,6,64,32)f32 #19910=(36,6,32,64)f32 torch.matmul torch.matmul_2418 2 1 q0.1 19910 attn.1 $input=q0.1 $other=19910 #q0.1=(36,6,64,32)f32 #19910=(36,6,32,64)f32 #attn.1=(36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3431 2 1 relative_position_bias0.1 26875 19921 $input=relative_position_bias0.1 $dim=26875 #relative_position_bias0.1=(6,64,64)f32 #19921=(1,6,64,64)f32 aten::add pnnx_21682 3 1 attn.1 19921 26876 attn0.1 #attn.1=(36,6,64,64)f32 #19921=(1,6,64,64)f32 #attn0.1=(36,6,64,64)f32 prim::Constant pnnx_21683 0 1 26877 value=0 aten::size pnnx_21684 2 1 attn_mask.1 26877 19923 #attn_mask.1=(36,64,64)f32 prim::NumToTensor pnnx_21685 1 1 19923 other.1 aten::Int pnnx_21686 1 1 other.1 19925 prim::Constant pnnx_21687 0 1 26878 value=trunc aten::div pnnx_21688 3 1 B_.1 other.1 26878 19926 aten::Int pnnx_21689 1 1 19926 19927 prim::Constant pnnx_21690 0 1 26879 value=6 prim::ListConstruct pnnx_21691 5 1 19927 19925 26879 19894 19893 19928 prim::Constant pnnx_21693 0 1 26880 value=1 prim::Constant pnnx_21695 0 1 26881 value=0 prim::Constant pnnx_21697 0 1 26882 value=1 Tensor.view Tensor.view_2175 2 1 attn0.1 19928 19929 
$input=attn0.1 $shape=19928 #attn0.1=(36,6,64,64)f32 #19929=(1,36,6,64,64)f32 torch.unsqueeze torch.unsqueeze_3432 2 1 attn_mask.1 26880 19930 $input=attn_mask.1 $dim=26880 #attn_mask.1=(36,64,64)f32 #19930=(36,1,64,64)f32 torch.unsqueeze torch.unsqueeze_3433 2 1 19930 26881 19931 $input=19930 $dim=26881 #19930=(36,1,64,64)f32 #19931=(1,36,1,64,64)f32 aten::add pnnx_21698 3 1 19929 19931 26882 attn1.1 #19929=(1,36,6,64,64)f32 #19931=(1,36,1,64,64)f32 #attn1.1=(1,36,6,64,64)f32 prim::Constant pnnx_21699 0 1 26883 value=-1 prim::Constant pnnx_21700 0 1 26884 value=6 prim::ListConstruct pnnx_21701 4 1 26883 26884 19892 19891 19933 Tensor.view Tensor.view_2176 2 1 attn1.1 19933 input.3 $input=attn1.1 $shape=19933 #attn1.1=(1,36,6,64,64)f32 #input.3=(36,6,64,64)f32 nn.Softmax layers_mmsa.5.residual_group.blocks.5.attn.softmax 1 1 input.3 19935 dim=-1 #input.3=(36,6,64,64)f32 #19935=(36,6,64,64)f32 nn.Dropout layers_mmsa.5.residual_group.blocks.5.attn.attn_drop 1 1 19935 19936 #19935=(36,6,64,64)f32 #19936=(36,6,64,64)f32 Tensor.select Tensor.select_973 3 1 qkv0.1 26866 26867 v.1 $input=qkv0.1 $dim=26866 $index=26867 #qkv0.1=(3,36,6,64,32)f32 #v.1=(36,6,64,32)f32 prim::Constant pnnx_21704 0 1 26885 value=1 prim::Constant pnnx_21705 0 1 26886 value=2 torch.matmul torch.matmul_2419 2 1 19936 v.1 19937 $input=19936 $other=v.1 #19936=(36,6,64,64)f32 #v.1=(36,6,64,32)f32 #19937=(36,6,64,32)f32 prim::ListConstruct pnnx_21707 3 1 19886 19890 19898 19939 torch.transpose torch.transpose_3214 3 1 19937 26885 26886 19938 $input=19937 $dim0=26885 $dim1=26886 #19937=(36,6,64,32)f32 #19938=(36,64,6,32)f32 Tensor.reshape Tensor.reshape_649 2 1 19938 19939 input0.1 $input=19938 $shape=19939 #19938=(36,64,6,32)f32 #input0.1=(36,64,192)f32 nn.Linear layers_mmsa.5.residual_group.blocks.5.attn.proj 1 1 input0.1 19941 bias=True in_features=192 out_features=192 @bias=(192)f32 @weight=(192,192)f32 #input0.1=(36,64,192)f32 #19941=(36,64,192)f32 nn.Dropout layers_mmsa.5.residual_group.blocks.5.attn.proj_drop 1 1 19941 19942 #19941=(36,64,192)f32 #19942=(36,64,192)f32 prim::Constant pnnx_21709 0 1 26887 value=-1 prim::Constant pnnx_21710 0 1 26888 value=8 prim::Constant pnnx_21711 0 1 26889 value=8 prim::ListConstruct pnnx_21712 4 1 26887 26888 26889 19832 19943 prim::Constant pnnx_21714 0 1 26890 value=8 prim::Constant pnnx_21715 0 1 26891 value=trunc aten::div pnnx_21716 3 1 H1.1 26890 26891 19945 aten::Int pnnx_21717 1 1 19945 19946 prim::Constant pnnx_21718 0 1 26892 value=8 prim::Constant pnnx_21719 0 1 26893 value=trunc aten::div pnnx_21720 3 1 W1.1 26892 26893 19947 aten::Int pnnx_21721 1 1 19947 19948 prim::Constant pnnx_21722 0 1 26894 value=1 prim::Constant pnnx_21723 0 1 26895 value=8 prim::Constant pnnx_21724 0 1 26896 value=8 prim::Constant pnnx_21725 0 1 26897 value=-1 prim::ListConstruct pnnx_21726 6 1 26894 19946 19948 26895 26896 26897 19949 prim::Constant pnnx_21728 0 1 26898 value=0 prim::Constant pnnx_21729 0 1 26899 value=1 prim::Constant pnnx_21730 0 1 26900 value=3 prim::Constant pnnx_21731 0 1 26901 value=2 prim::Constant pnnx_21732 0 1 26902 value=4 prim::Constant pnnx_21733 0 1 26903 value=5 prim::ListConstruct pnnx_21734 6 1 26898 26899 26900 26901 26902 26903 19951 Tensor.view Tensor.view_2177 2 1 19942 19943 windows.1 $input=19942 $shape=19943 #19942=(36,64,192)f32 #windows.1=(36,8,8,192)f32 Tensor.view Tensor.view_2178 2 1 windows.1 19949 x3.1 $input=windows.1 $shape=19949 #windows.1=(36,8,8,192)f32 #x3.1=(1,6,6,8,8,192)f32 prim::Constant pnnx_21738 0 1 26905 value=1 prim::Constant 
pnnx_21739 0 1 26906 value=-1 prim::ListConstruct pnnx_21740 4 1 26905 1543 1783 26906 19954 torch.permute torch.permute_2959 2 1 x3.1 19951 19952 $input=x3.1 $dims=19951 #x3.1=(1,6,6,8,8,192)f32 #19952=(1,6,8,6,8,192)f32 Tensor.contiguous Tensor.contiguous_325 1 1 19952 19953 memory_format=torch.contiguous_format $input=19952 #19952=(1,6,8,6,8,192)f32 #19953=(1,6,8,6,8,192)f32 prim::Constant pnnx_21742 0 1 26907 value=4 prim::Constant pnnx_21743 0 1 26908 value=4 prim::ListConstruct pnnx_21744 2 1 26907 26908 19956 prim::Constant pnnx_21745 0 1 26909 value=1 prim::Constant pnnx_21746 0 1 26910 value=2 prim::ListConstruct pnnx_21747 2 1 26909 26910 19957 Tensor.view Tensor.view_2179 2 1 19953 19954 shifted_x.1 $input=19953 $shape=19954 #19953=(1,6,8,6,8,192)f32 #shifted_x.1=(1,48,48,192)f32 aten::mul pnnx_21749 2 1 H1.1 W1.1 19959 aten::Int pnnx_21750 1 1 19959 19960 prim::ListConstruct pnnx_21751 3 1 19827 19960 19831 19961 prim::Constant pnnx_21753 0 1 19963 value=None prim::Constant pnnx_21754 0 1 26911 value=1 torch.roll torch.roll_2527 3 1 shifted_x.1 19956 19957 x4.1 $input=shifted_x.1 $shifts=19956 $dims=19957 #shifted_x.1=(1,48,48,192)f32 #x4.1=(1,48,48,192)f32 Tensor.view Tensor.view_2180 2 1 x4.1 19961 x5.1 $input=x4.1 $shape=19961 #x4.1=(1,48,48,192)f32 #x5.1=(1,2304,192)f32 aten::add pnnx_21755 3 1 19806 x5.1 26911 input.5 #19806=(1,2304,192)f32 #x5.1=(1,2304,192)f32 #input.5=(1,2304,192)f32 nn.LayerNorm layers_mmsa.5.residual_group.blocks.5.norm2 1 1 input.5 19965 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #input.5=(1,2304,192)f32 #19965=(1,2304,192)f32 nn.Linear layers_mmsa.5.residual_group.blocks.5.mlp.fc1 1 1 19965 19970 bias=True in_features=192 out_features=384 @bias=(384)f32 @weight=(384,192)f32 #19965=(1,2304,192)f32 #19970=(1,2304,384)f32 nn.GELU layers_mmsa.5.residual_group.blocks.5.mlp.act 1 1 19970 19971 #19970=(1,2304,384)f32 #19971=(1,2304,384)f32 nn.Dropout layers_mmsa.5.residual_group.blocks.5.mlp.drop 1 1 19971 19972 #19971=(1,2304,384)f32 #19972=(1,2304,384)f32 nn.Linear layers_mmsa.5.residual_group.blocks.5.mlp.fc2 1 1 19972 19973 bias=True in_features=384 out_features=192 @bias=(192)f32 @weight=(192,384)f32 #19972=(1,2304,384)f32 #19973=(1,2304,192)f32 nn.Dropout layers_mmsa.5.residual_group.blocks.5.mlp.drop 1 1 19973 19974 #19973=(1,2304,192)f32 #19974=(1,2304,192)f32 prim::Constant pnnx_21756 0 1 19975 value=None prim::Constant pnnx_21757 0 1 26912 value=1 aten::add pnnx_21758 3 1 input.5 19974 26912 19976 #input.5=(1,2304,192)f32 #19974=(1,2304,192)f32 #19976=(1,2304,192)f32 prim::Constant pnnx_21759 0 1 19977 value=0 prim::Constant pnnx_21760 0 1 19978 value=1 prim::Constant pnnx_21761 0 1 19979 value=2 prim::Constant pnnx_21762 0 1 19980 value=192 aten::size pnnx_21763 2 1 19976 19977 19981 #19976=(1,2304,192)f32 prim::NumToTensor pnnx_21764 1 1 19981 B.247 aten::Int pnnx_21765 1 1 B.247 19983 prim::ListConstruct pnnx_21767 4 1 19983 19980 1540 1780 19985 torch.transpose torch.transpose_3215 3 1 19976 19978 19979 19984 $input=19976 $dim0=19978 $dim1=19979 #19976=(1,2304,192)f32 #19984=(1,192,2304)f32 Tensor.view Tensor.view_2181 2 1 19984 19985 input.463 $input=19984 $shape=19985 #19984=(1,192,2304)f32 #input.463=(1,192,48,48)f32 nn.Conv2d layers_mmsa.5.conv 1 1 input.463 19987 bias=True dilation=(1,1) groups=1 in_channels=192 kernel_size=(3,3) out_channels=192 padding=(1,1) padding_mode=zeros stride=(1,1) @bias=(192)f32 @weight=(192,192,3,3)f32 #input.463=(1,192,48,48)f32 #19987=(1,192,48,48)f32 
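The operator chain traced above for layers_mmsa.5.residual_group.blocks.5 (Tensor.view_2170 through torch.roll_2527, norm2, mlp.fc1/fc2 and layers_mmsa.5.conv) is a standard shifted-window (Swin-style) attention block: the 1x48x48x192 map is partitioned into 36 windows of 8x8 tokens, 6-head attention with a relative position bias and the shifted-window mask is applied, the windows are merged back, the cyclic shift is undone with a roll of (4,4), and the LayerNorm+MLP residual plus the group's 3x3 convolution follow. Below is a minimal PyTorch sketch of the same computation, with random stand-in weights, dropouts omitted (they are identities at inference), and illustrative function names that are not taken from the export:

```python
import torch
import torch.nn as nn

def window_partition(x, ws=8):
    # (1, 48, 48, 192) -> (36, 64, 192): Tensor.view_2170 / permute_2956 / view_2171 / view_2172
    B, H, W, C = x.shape
    x = x.view(B, H // ws, ws, W // ws, ws, C).permute(0, 1, 3, 2, 4, 5).contiguous()
    return x.view(-1, ws * ws, C)

def window_reverse(wins, H=48, W=48, ws=8):
    # (36, 64, 192) -> (1, 48, 48, 192): Tensor.view_2177 / view_2178 / permute_2959 / view_2179
    C = wins.shape[-1]
    x = wins.view(1, H // ws, W // ws, ws, ws, C).permute(0, 1, 3, 2, 4, 5).contiguous()
    return x.view(1, H, W, C)

def window_attention(xw, qkv, proj, bias_table, bias_index, attn_mask, heads=6):
    # xw: (36, 64, 192) window tokens
    B_, N, C = xw.shape
    q, k, v = qkv(xw).reshape(B_, N, 3, heads, C // heads).permute(2, 0, 3, 1, 4)
    attn = (q * (C // heads) ** -0.5) @ k.transpose(-2, -1)   # scale 32**-0.5 = 1.767767e-01
    # gather the (2*8-1)^2 x 6 = (225, 6) bias table with the flattened (64, 64) index
    bias = bias_table[bias_index.view(-1)].view(N, N, -1).permute(2, 0, 1).contiguous()
    attn = attn + bias.unsqueeze(0)                           # (36, 6, 64, 64)
    nW = attn_mask.shape[0]                                   # 36 windows
    attn = attn.view(-1, nW, heads, N, N) + attn_mask.unsqueeze(1).unsqueeze(0)
    attn = attn.view(-1, heads, N, N).softmax(dim=-1)         # input.3 -> softmax
    x = (attn @ v).transpose(1, 2).reshape(B_, N, C)          # (36, 64, 192)
    return proj(x)

# modules matching the shapes in the dump (weights here are random, not the exported ones)
qkv = nn.Linear(192, 576)
proj = nn.Linear(192, 192)
norm2 = nn.LayerNorm(192)
mlp = nn.Sequential(nn.Linear(192, 384), nn.GELU(), nn.Linear(384, 192))
bias_table = torch.randn(225, 6)               # relative_position_bias_table stand-in
bias_index = torch.randint(0, 225, (64, 64))   # relative_position_index stand-in
attn_mask = torch.zeros(36, 64, 64)            # attn_mask.1 stand-in
conv = nn.Conv2d(192, 192, kernel_size=3, padding=1)   # layers_mmsa.5.conv

shortcut = torch.randn(1, 2304, 192)           # block input (19806 in the graph)
x = torch.randn(1, 48, 48, 192)                # already cyclically shifted (x0.1)

xw = window_partition(x)                                 # (36, 64, 192)
xw = window_attention(xw, qkv, proj, bias_table, bias_index, attn_mask)
x = window_reverse(xw)                                   # (1, 48, 48, 192)
x = torch.roll(x, shifts=(4, 4), dims=(1, 2))            # undo the shift (torch.roll_2527)
x = shortcut + x.view(1, 48 * 48, 192)                   # first residual -> input.5
x = x + mlp(norm2(x))                                    # norm2 + MLP -> 19976
feat = conv(x.transpose(1, 2).view(1, 192, 48, 48))      # (1, 192, 48, 48), layers_mmsa.5.conv
```

The detour through a (1, 36, 6, 64, 64) view before adding the mask is what the Tensor.view_2175 / unsqueeze_3432 / unsqueeze_3433 sequence above implements: it lets one (36, 64, 64) mask broadcast over all heads and all batch entries before the scores are flattened back to (36, 6, 64, 64) for the softmax.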
prim::Constant pnnx_21769 0 1 19988 value=-1 prim::Constant pnnx_21770 0 1 19989 value=2 prim::Constant pnnx_21771 0 1 19990 value=1 prim::Constant pnnx_21773 0 1 26913 value=2 torch.flatten torch.flatten_2203 3 1 19987 19989 19988 19991 $input=19987 $start_dim=19989 $end_dim=19988 #19987=(1,192,48,48)f32 #19991=(1,192,2304)f32 torch.transpose torch.transpose_3216 3 1 19991 19990 26913 19992 $input=19991 $dim0=19990 $dim1=26913 #19991=(1,192,2304)f32 #19992=(1,2304,192)f32 aten::add pnnx_21775 3 1 19992 19011 19012 19993 #19992=(1,2304,192)f32 #19011=(1,2304,192)f32 #19993=(1,2304,192)f32 nn.LayerNorm norm_mmsa 1 1 19993 2206 elementwise_affine=True eps=1.000000e-05 normalized_shape=(192) @bias=(192)f32 @weight=(192)f32 #19993=(1,2304,192)f32 #2206=(1,2304,192)f32 prim::Constant pnnx_21776 0 1 19994 value=0 prim::Constant pnnx_21777 0 1 19995 value=1 prim::Constant pnnx_21778 0 1 19996 value=2 prim::Constant pnnx_21779 0 1 19997 value=192 aten::size pnnx_21780 2 1 2206 19994 19998 #2206=(1,2304,192)f32 prim::NumToTensor pnnx_21781 1 1 19998 B.1 aten::Int pnnx_21782 1 1 B.1 20000 prim::ListConstruct pnnx_21784 4 1 20000 19997 1537 1777 20002 torch.transpose torch.transpose_3217 3 1 2206 19995 19996 20001 $input=2206 $dim0=19995 $dim1=19996 #2206=(1,2304,192)f32 #20001=(1,192,2304)f32 Tensor.view Tensor.view_2182 2 1 20001 20002 input.1 $input=20001 $shape=20002 #20001=(1,192,2304)f32 #input.1=(1,192,48,48)f32 nn.Conv2d conv_after_body_mmsa 1 1 input.1 2212 bias=True dilation=(1,1) groups=1 in_channels=192 kernel_size=(3,3) out_channels=192 padding=(1,1) padding_mode=zeros stride=(1,1) @bias=(192)f32 @weight=(192,192,3,3)f32 #input.1=(1,192,48,48)f32 #2212=(1,192,48,48)f32 prim::Constant pnnx_21786 0 1 26914 value=1 aten::add pnnx_21787 3 1 2212 14094 26914 input1.3 #2212=(1,192,48,48)f32 #14094=(1,192,48,48)f32 #input1.3=(1,192,48,48)f32 nn.ConvTranspose2d upsample_conv 1 1 input1.3 2220 bias=True dilation=(1,1) groups=1 in_channels=192 kernel_size=(8,8) out_channels=192 output_padding=(0,0) padding=(0,0) stride=(8,8) @bias=(192)f32 @weight=(192,192,8,8)f32 #input1.3=(1,192,48,48)f32 #2220=(1,192,384,384)f32 nn.Conv2d conv_last 1 1 2220 2221 bias=True dilation=(1,1) groups=1 in_channels=192 kernel_size=(3,3) out_channels=3 padding=(1,1) padding_mode=zeros stride=(1,1) @bias=(3)f32 @weight=(3,192,3,3)f32 #2220=(1,192,384,384)f32 #2221=(1,3,384,384)f32 torch.zeros_like torch.zeros_like_3434 1 1 x_a.1 2228 $input=x_a.1 #x_a.1=(1,192,48,48)f32 #2228=(1,192,48,48)f32 prim::TupleConstruct pnnx_21790 4 1 2221 x_a.1 x_b.1 2228 2233 #2221=(1,3,384,384)f32 #x_a.1=(1,192,48,48)f32 #x_b.1=(1,192,48,48)f32 #2228=(1,192,48,48)f32 pnnx.Output pnnx_output_0 1 0 2233
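After the residual groups, the dump ends with the reconstruction tail: norm_mmsa, a 3x3 conv_after_body_mmsa whose output is added back to the feature map 14094 as a global residual, an 8x8/stride-8 ConvTranspose2d upsample_conv, and conv_last down to 3 channels; the result is packed into a tuple with x_a.1, x_b.1 and a zeros_like(x_a.1) placeholder. A minimal sketch under the same shapes, again with random stand-in weights rather than the exported ones:

```python
import torch
import torch.nn as nn

# modules named after the dump; hyperparameters taken from the operator attributes above
norm_mmsa = nn.LayerNorm(192)
conv_after_body_mmsa = nn.Conv2d(192, 192, kernel_size=3, padding=1)
upsample_conv = nn.ConvTranspose2d(192, 192, kernel_size=8, stride=8)
conv_last = nn.Conv2d(192, 3, kernel_size=3, padding=1)

tokens = torch.randn(1, 2304, 192)     # deep-feature tokens after the last residual add (19993)
residual = torch.randn(1, 192, 48, 48) # feature map added back as a global residual (14094)

x = norm_mmsa(tokens).transpose(1, 2).view(1, 192, 48, 48)   # 2206 -> input.1
x = conv_after_body_mmsa(x) + residual                       # 2212 + 14094 -> input1.3
out = conv_last(upsample_conv(x))                            # (1, 3, 384, 384), matches 2221
# the graph then returns the tuple (out, x_a.1, x_b.1, zeros_like(x_a.1)) via pnnx.Output
```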