{"cells":[{"cell_type":"markdown","metadata":{"id":"z0Sek0wtEs5n"},"source":["## 百度Baseline版本数据导入"]},{"cell_type":"code","execution_count":null,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"collapsed":true,"executionInfo":{"elapsed":30831,"status":"ok","timestamp":1720669313879,"user":{"displayName":"pei jian zeng","userId":"06013928868849686113"},"user_tz":-480},"id":"GTV_YDaxEsd3","outputId":"184da13e-f37f-407b-b8b8-e6291054b53b"},"outputs":[{"name":"stdout","output_type":"stream","text":["--2024-07-11 03:41:23--  https://ai-studio-online.bj.bcebos.com/v1/2dddd05e577849ad95e1fe1133d3af29d13085ac0cfd499c853ff5d9df2ac07f?responseContentDisposition=attachment%3B%20filename%3Dtrain_data.zip&authorization=bce-auth-v1%2F5cfe9a5e1454405eb2a975c43eace6ec%2F2024-05-05T03%3A23%3A33Z%2F-1%2F%2F8540633c7e39fddf8471d6d8206c3b761748c58c06005acb218593a8df19d7f1\n","Resolving ai-studio-online.bj.bcebos.com (ai-studio-online.bj.bcebos.com)... 103.235.47.176, 2409:8c04:1001:1203:0:ff:b0bb:4f27\n","Connecting to ai-studio-online.bj.bcebos.com (ai-studio-online.bj.bcebos.com)|103.235.47.176|:443... connected.\n","HTTP request sent, awaiting response... 200 OK\n","Length: 46031310 (44M) [application/octet-stream]\n","Saving to: ‘train_data.zip’\n","\n","train_data.zip      100%[===================>]  43.90M  11.1MB/s    in 20s     \n","\n","2024-07-11 03:41:45 (2.17 MB/s) - ‘train_data.zip’ saved [46031310/46031310]\n","\n","--2024-07-11 03:41:45--  https://ai-studio-online.bj.bcebos.com/v1/a96dc8ba8201445b966980a0a48f52705338a48e29e64c53bddb7ef8861c5123?responseContentDisposition=attachment%3B%20filename%3Dtrack_A.zip&authorization=bce-auth-v1%2F5cfe9a5e1454405eb2a975c43eace6ec%2F2024-05-06T07%3A54%3A51Z%2F-1%2F%2F17b5155bd16a8af1e4af971498082687656af7fcecfc5a8e57591b85053210ec\n","Resolving ai-studio-online.bj.bcebos.com (ai-studio-online.bj.bcebos.com)... 
103.235.47.176, 2409:8c04:1001:1203:0:ff:b0bb:4f27\n","Connecting to ai-studio-online.bj.bcebos.com (ai-studio-online.bj.bcebos.com)|103.235.47.176|:443... connected.\n","HTTP request sent, awaiting response... 200 OK\n","Length: 4688102 (4.5M) [application/octet-stream]\n","Saving to: ‘track_A.zip’\n","\n","track_A.zip         100%[===================>]   4.47M   791KB/s    in 6.9s    \n","\n","2024-07-11 03:41:53 (666 KB/s) - ‘track_A.zip’ saved [4688102/4688102]\n","\n"]}],"source":["!wget --header=\"Host: ai-studio-online.bj.bcebos.com\" --header=\"User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36 Edg/126.0.0.0\" --header=\"Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7\" --header=\"Accept-Language: zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6\" --header=\"Referer: https://aistudio.baidu.com/\" \"https://ai-studio-online.bj.bcebos.com/v1/2dddd05e577849ad95e1fe1133d3af29d13085ac0cfd499c853ff5d9df2ac07f?responseContentDisposition=attachment%3B%20filename%3Dtrain_data.zip&authorization=bce-auth-v1%2F5cfe9a5e1454405eb2a975c43eace6ec%2F2024-05-05T03%3A23%3A33Z%2F-1%2F%2F8540633c7e39fddf8471d6d8206c3b761748c58c06005acb218593a8df19d7f1\" -c -O 'train_data.zip'\n","!wget --header=\"Host: ai-studio-online.bj.bcebos.com\" --header=\"User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36 Edg/126.0.0.0\" --header=\"Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7\" --header=\"Accept-Language: zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6\" --header=\"Referer: https://aistudio.baidu.com/\" 
\"https://ai-studio-online.bj.bcebos.com/v1/a96dc8ba8201445b966980a0a48f52705338a48e29e64c53bddb7ef8861c5123?responseContentDisposition=attachment%3B%20filename%3Dtrack_A.zip&authorization=bce-auth-v1%2F5cfe9a5e1454405eb2a975c43eace6ec%2F2024-05-06T07%3A54%3A51Z%2F-1%2F%2F17b5155bd16a8af1e4af971498082687656af7fcecfc5a8e57591b85053210ec\" -c -O 'track_A.zip'\n","!wget --header=\"Host: ai-studio-online.bj.bcebos.com\" --header=\"User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36 Edg/126.0.0.0\" --header=\"Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7\" --header=\"Accept-Language: zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6\" --header=\"Referer: https://aistudio.baidu.com/\" \"https://ai-studio-online.bj.bcebos.com/v1/38e9adf0fce84527aad3558cc3e82d0e9a251aac4c934297afae9b74d9b3d1e9?responseContentDisposition=attachment%3B%20filename%3Dtrain_track_B.zip&authorization=bce-auth-v1%2F5cfe9a5e1454405eb2a975c43eace6ec%2F2024-06-04T03%3A21%3A02Z%2F-1%2F%2Facd359add161bace603a52c7a268467406cb3c1889a7114bbb687de8002b55f6\" -c -O 'train_track_B.zip'\n","!wget --header=\"Host: ai-studio-online.bj.bcebos.com\" --header=\"User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36 Edg/126.0.0.0\" --header=\"Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7\" --header=\"Accept-Language: zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6\" --header=\"Referer: https://aistudio.baidu.com/\" 
\"https://ai-studio-online.bj.bcebos.com/v1/1638f9c292b9437bb46885186407a63e584856c91f9f4c18908b87abd46471e0?responseContentDisposition=attachment%3B%20filename%3Dtrack_B.zip&authorization=bce-auth-v1%2F5cfe9a5e1454405eb2a975c43eace6ec%2F2024-05-05T03%3A02%3A25Z%2F-1%2F%2Fcfdfd6b6a9e096c761ee8e7d863d586741c69a9e6de89f9c3696706d35f8b265\" -c -O 'track_B.zip'"]},{"cell_type":"code","execution_count":null,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"collapsed":true,"executionInfo":{"elapsed":2620,"status":"ok","timestamp":1720669329839,"user":{"displayName":"pei jian zeng","userId":"06013928868849686113"},"user_tz":-480},"id":"OS4r3PcokLdA","outputId":"db235a92-c225-4011-b971-ca72d7f10eb2"},"outputs":[{"name":"stdout","output_type":"stream","text":["Archive:  train_data.zip\n","  inflating: data/mesh_001.ply       \n","  inflating: data/mesh_002.ply       \n","  inflating: data/mesh_004.ply       \n","  inflating: data/mesh_005.ply       \n","  inflating: data/mesh_006.ply       \n","  inflating: data/mesh_007.ply       \n","  inflating: data/mesh_008.ply       \n","  inflating: data/mesh_010.ply       \n","  inflating: data/mesh_012.ply       \n","  inflating: data/mesh_013.ply       \n","  inflating: data/mesh_017.ply       \n","  inflating: data/mesh_018.ply       \n","  inflating: data/mesh_021.ply       \n","  inflating: data/mesh_022.ply       \n","  inflating: data/mesh_023.ply       \n","  inflating: data/mesh_025.ply       \n","  inflating: data/mesh_026.ply       \n","  inflating: data/mesh_027.ply       \n","  inflating: data/mesh_028.ply       \n","  inflating: data/mesh_029.ply       \n","  inflating: data/mesh_030.ply       \n","  inflating: data/mesh_031.ply       \n","  inflating: data/mesh_032.ply       \n","  inflating: data/mesh_034.ply       \n","  inflating: data/mesh_035.ply       \n","  inflating: data/mesh_039.ply       \n","  inflating: data/mesh_040.ply       \n","  inflating: data/mesh_043.ply       \n","  inflating: 
data/mesh_044.ply       \n","  inflating: data/mesh_045.ply       \n","  inflating: data/mesh_046.ply       \n","  inflating: data/mesh_047.ply       \n","  inflating: data/mesh_048.ply       \n","  inflating: data/mesh_049.ply       \n","  inflating: data/mesh_050.ply       \n","  inflating: data/mesh_051.ply       \n","  inflating: data/mesh_052.ply       \n","  inflating: data/mesh_054.ply       \n","  inflating: data/mesh_055.ply       \n","  inflating: data/mesh_056.ply       \n","  inflating: data/mesh_058.ply       \n","  inflating: data/mesh_059.ply       \n","  inflating: data/mesh_060.ply       \n","  inflating: data/mesh_061.ply       \n","  inflating: data/mesh_062.ply       \n","  inflating: data/mesh_063.ply       \n","  inflating: data/mesh_064.ply       \n","  inflating: data/mesh_065.ply       \n","  inflating: data/mesh_067.ply       \n","  inflating: data/mesh_069.ply       \n","  inflating: data/mesh_070.ply       \n","  inflating: data/mesh_071.ply       \n","  inflating: data/mesh_072.ply       \n","  inflating: data/mesh_073.ply       \n","  inflating: data/mesh_074.ply       \n","  inflating: data/mesh_075.ply       \n","  inflating: data/mesh_076.ply       \n","  inflating: data/mesh_077.ply       \n","  inflating: data/mesh_078.ply       \n","  inflating: data/mesh_079.ply       \n","  inflating: data/mesh_080.ply       \n","  inflating: data/mesh_081.ply       \n","  inflating: data/mesh_083.ply       \n","  inflating: data/mesh_084.ply       \n","  inflating: data/mesh_085.ply       \n","  inflating: data/mesh_086.ply       \n","  inflating: data/mesh_087.ply       \n","  inflating: data/mesh_088.ply       \n","  inflating: data/mesh_090.ply       \n","  inflating: data/mesh_091.ply       \n","  inflating: data/mesh_092.ply       \n","  inflating: data/mesh_094.ply       \n","  inflating: data/mesh_095.ply       \n","  inflating: data/mesh_096.ply       \n","  inflating: data/mesh_097.ply       \n","  inflating: data/mesh_100.ply       
\n","  inflating: data/mesh_101.ply       \n","  inflating: data/mesh_102.ply       \n","  inflating: data/mesh_105.ply       \n","  inflating: data/mesh_106.ply       \n","  inflating: data/mesh_107.ply       \n","  inflating: data/mesh_109.ply       \n","  inflating: data/mesh_110.ply       \n","  inflating: data/mesh_111.ply       \n","  inflating: data/mesh_112.ply       \n","  inflating: data/mesh_113.ply       \n","  inflating: data/mesh_114.ply       \n","  inflating: data/mesh_115.ply       \n","  inflating: data/mesh_116.ply       \n","  inflating: data/mesh_117.ply       \n","  inflating: data/mesh_118.ply       \n","  inflating: data/mesh_119.ply       \n","  inflating: data/mesh_120.ply       \n","  inflating: data/mesh_121.ply       \n","  inflating: data/mesh_123.ply       \n","  inflating: data/mesh_124.ply       \n","  inflating: data/mesh_125.ply       \n","  inflating: data/mesh_126.ply       \n","  inflating: data/mesh_127.ply       \n","  inflating: data/mesh_128.ply       \n","  inflating: data/mesh_129.ply       \n","  inflating: data/mesh_130.ply       \n","  inflating: data/mesh_131.ply       \n","  inflating: data/mesh_133.ply       \n","  inflating: data/mesh_134.ply       \n","  inflating: data/mesh_136.ply       \n","  inflating: data/mesh_137.ply       \n","  inflating: data/mesh_138.ply       \n","  inflating: data/mesh_139.ply       \n","  inflating: data/mesh_140.ply       \n","  inflating: data/mesh_141.ply       \n","  inflating: data/mesh_142.ply       \n","  inflating: data/mesh_143.ply       \n","  inflating: data/mesh_144.ply       \n","  inflating: data/mesh_145.ply       \n","  inflating: data/mesh_146.ply       \n","  inflating: data/mesh_147.ply       \n","  inflating: data/mesh_148.ply       \n","  inflating: data/mesh_149.ply       \n","  inflating: data/mesh_150.ply       \n","  inflating: data/mesh_151.ply       \n","  inflating: data/mesh_152.ply       \n","  inflating: data/mesh_153.ply       \n","  inflating: 
data/mesh_155.ply       \n","  inflating: data/mesh_156.ply       \n","  inflating: data/mesh_157.ply       \n","  inflating: data/mesh_158.ply       \n","  inflating: data/mesh_159.ply       \n","  inflating: data/mesh_160.ply       \n","  inflating: data/mesh_161.ply       \n","  inflating: data/mesh_162.ply       \n","  inflating: data/mesh_163.ply       \n","  inflating: data/mesh_165.ply       \n","  inflating: data/mesh_166.ply       \n","  inflating: data/mesh_170.ply       \n","  inflating: data/mesh_172.ply       \n","  inflating: data/mesh_173.ply       \n","  inflating: data/mesh_175.ply       \n","  inflating: data/mesh_176.ply       \n","  inflating: data/mesh_177.ply       \n","  inflating: data/mesh_178.ply       \n","  inflating: data/mesh_179.ply       \n","  inflating: data/mesh_180.ply       \n","  inflating: data/mesh_181.ply       \n","  inflating: data/mesh_182.ply       \n","  inflating: data/mesh_183.ply       \n","  inflating: data/mesh_184.ply       \n","  inflating: data/mesh_186.ply       \n","  inflating: data/mesh_190.ply       \n","  inflating: data/mesh_191.ply       \n","  inflating: data/mesh_192.ply       \n","  inflating: data/mesh_193.ply       \n","  inflating: data/mesh_195.ply       \n","  inflating: data/mesh_196.ply       \n","  inflating: data/mesh_198.ply       \n","  inflating: data/mesh_199.ply       \n","  inflating: data/mesh_200.ply       \n","  inflating: data/mesh_201.ply       \n","  inflating: data/mesh_202.ply       \n","  inflating: data/mesh_203.ply       \n","  inflating: data/mesh_205.ply       \n","  inflating: data/mesh_207.ply       \n","  inflating: data/mesh_210.ply       \n","  inflating: data/mesh_211.ply       \n","  inflating: data/mesh_212.ply       \n","  inflating: data/mesh_213.ply       \n","  inflating: data/mesh_214.ply       \n","  inflating: data/mesh_215.ply       \n","  inflating: data/mesh_217.ply       \n","  inflating: data/mesh_219.ply       \n","  inflating: data/mesh_220.ply       
\n","  inflating: data/mesh_221.ply       \n","  inflating: data/mesh_222.ply       \n","  inflating: data/mesh_223.ply       \n","  inflating: data/mesh_224.ply       \n","  inflating: data/mesh_225.ply       \n","  inflating: data/mesh_227.ply       \n","  inflating: data/mesh_228.ply       \n","  inflating: data/mesh_229.ply       \n","  inflating: data/mesh_230.ply       \n","  inflating: data/mesh_231.ply       \n","  inflating: data/mesh_232.ply       \n","  inflating: data/mesh_233.ply       \n","  inflating: data/mesh_234.ply       \n","  inflating: data/mesh_235.ply       \n","  inflating: data/mesh_236.ply       \n","  inflating: data/mesh_237.ply       \n","  inflating: data/mesh_241.ply       \n","  inflating: data/mesh_243.ply       \n","  inflating: data/mesh_244.ply       \n","  inflating: data/mesh_245.ply       \n","  inflating: data/mesh_246.ply       \n","  inflating: data/mesh_247.ply       \n","  inflating: data/mesh_248.ply       \n","  inflating: data/mesh_249.ply       \n","  inflating: data/mesh_251.ply       \n","  inflating: data/mesh_252.ply       \n","  inflating: data/mesh_253.ply       \n","  inflating: data/mesh_255.ply       \n","  inflating: data/mesh_257.ply       \n","  inflating: data/mesh_258.ply       \n","  inflating: data/mesh_259.ply       \n","  inflating: data/mesh_260.ply       \n","  inflating: data/mesh_261.ply       \n","  inflating: data/mesh_262.ply       \n","  inflating: data/mesh_263.ply       \n","  inflating: data/mesh_264.ply       \n","  inflating: data/mesh_266.ply       \n","  inflating: data/mesh_267.ply       \n","  inflating: data/mesh_268.ply       \n","  inflating: data/mesh_269.ply       \n","  inflating: data/mesh_271.ply       \n","  inflating: data/mesh_272.ply       \n","  inflating: data/mesh_273.ply       \n","  inflating: data/mesh_274.ply       \n","  inflating: data/mesh_275.ply       \n","  inflating: data/mesh_276.ply       \n","  inflating: data/mesh_277.ply       \n","  inflating: 
data/mesh_278.ply       \n","  inflating: data/mesh_279.ply       \n","  inflating: data/mesh_280.ply       \n","  inflating: data/mesh_281.ply       \n","  inflating: data/mesh_282.ply       \n","  inflating: data/mesh_283.ply       \n","  inflating: data/mesh_285.ply       \n","  inflating: data/mesh_286.ply       \n","  inflating: data/mesh_289.ply       \n","  inflating: data/mesh_290.ply       \n","  inflating: data/mesh_291.ply       \n","  inflating: data/mesh_292.ply       \n","  inflating: data/mesh_293.ply       \n","  inflating: data/mesh_294.ply       \n","  inflating: data/mesh_295.ply       \n","  inflating: data/mesh_296.ply       \n","  inflating: data/mesh_297.ply       \n","  inflating: data/mesh_298.ply       \n","  inflating: data/mesh_299.ply       \n","  inflating: data/mesh_300.ply       \n","  inflating: data/mesh_301.ply       \n","  inflating: data/mesh_302.ply       \n","  inflating: data/mesh_304.ply       \n","  inflating: data/mesh_305.ply       \n","  inflating: data/mesh_306.ply       \n","  inflating: data/mesh_308.ply       \n","  inflating: data/mesh_309.ply       \n","  inflating: data/mesh_310.ply       \n","  inflating: data/mesh_311.ply       \n","  inflating: data/mesh_312.ply       \n","  inflating: data/mesh_313.ply       \n","  inflating: data/mesh_314.ply       \n","  inflating: data/mesh_315.ply       \n","  inflating: data/mesh_319.ply       \n","  inflating: data/mesh_320.ply       \n","  inflating: data/mesh_321.ply       \n","  inflating: data/mesh_322.ply       \n","  inflating: data/mesh_323.ply       \n","  inflating: data/mesh_324.ply       \n","  inflating: data/mesh_325.ply       \n","  inflating: data/mesh_327.ply       \n","  inflating: data/mesh_328.ply       \n","  inflating: data/mesh_329.ply       \n","  inflating: data/mesh_331.ply       \n","  inflating: data/mesh_332.ply       \n","  inflating: data/mesh_333.ply       \n","  inflating: data/mesh_334.ply       \n","  inflating: data/mesh_335.ply       
\n","  inflating: data/mesh_337.ply       \n","  inflating: data/mesh_338.ply       \n","  inflating: data/mesh_339.ply       \n","  inflating: data/mesh_340.ply       \n","  inflating: data/mesh_341.ply       \n","  inflating: data/mesh_344.ply       \n","  inflating: data/mesh_345.ply       \n","  inflating: data/mesh_347.ply       \n","  inflating: data/mesh_348.ply       \n","  inflating: data/mesh_349.ply       \n","  inflating: data/mesh_350.ply       \n","  inflating: data/mesh_352.ply       \n","  inflating: data/mesh_353.ply       \n","  inflating: data/mesh_354.ply       \n","  inflating: data/mesh_355.ply       \n","  inflating: data/mesh_356.ply       \n","  inflating: data/mesh_357.ply       \n","  inflating: data/mesh_358.ply       \n","  inflating: data/mesh_360.ply       \n","  inflating: data/mesh_362.ply       \n","  inflating: data/mesh_364.ply       \n","  inflating: data/mesh_365.ply       \n","  inflating: data/mesh_366.ply       \n","  inflating: data/mesh_367.ply       \n","  inflating: data/mesh_369.ply       \n","  inflating: data/mesh_371.ply       \n","  inflating: data/mesh_372.ply       \n","  inflating: data/mesh_373.ply       \n","  inflating: data/mesh_374.ply       \n","  inflating: data/mesh_375.ply       \n","  inflating: data/mesh_376.ply       \n","  inflating: data/mesh_378.ply       \n","  inflating: data/mesh_379.ply       \n","  inflating: data/mesh_380.ply       \n","  inflating: data/mesh_381.ply       \n","  inflating: data/mesh_384.ply       \n","  inflating: data/mesh_385.ply       \n","  inflating: data/mesh_389.ply       \n","  inflating: data/mesh_392.ply       \n","  inflating: data/mesh_393.ply       \n","  inflating: data/mesh_397.ply       \n","  inflating: data/mesh_398.ply       \n","  inflating: data/mesh_399.ply       \n","  inflating: data/mesh_401.ply       \n","  inflating: data/mesh_402.ply       \n","  inflating: data/mesh_403.ply       \n","  inflating: data/mesh_404.ply       \n","  inflating: 
data/mesh_405.ply       \n","  inflating: data/mesh_407.ply       \n","  inflating: data/mesh_408.ply       \n","  inflating: data/mesh_410.ply       \n","  inflating: data/mesh_412.ply       \n","  inflating: data/mesh_413.ply       \n","  inflating: data/mesh_414.ply       \n","  inflating: data/mesh_415.ply       \n","  inflating: data/mesh_417.ply       \n","  inflating: data/mesh_418.ply       \n","  inflating: data/mesh_419.ply       \n","  inflating: data/mesh_420.ply       \n","  inflating: data/mesh_422.ply       \n","  inflating: data/mesh_424.ply       \n","  inflating: data/mesh_425.ply       \n","  inflating: data/mesh_427.ply       \n","  inflating: data/mesh_430.ply       \n","  inflating: data/mesh_431.ply       \n","  inflating: data/mesh_433.ply       \n","  inflating: data/mesh_435.ply       \n","  inflating: data/mesh_436.ply       \n","  inflating: data/mesh_437.ply       \n","  inflating: data/mesh_439.ply       \n","  inflating: data/mesh_440.ply       \n","  inflating: data/mesh_443.ply       \n","  inflating: data/mesh_444.ply       \n","  inflating: data/mesh_446.ply       \n","  inflating: data/mesh_447.ply       \n","  inflating: data/mesh_448.ply       \n","  inflating: data/mesh_449.ply       \n","  inflating: data/mesh_450.ply       \n","  inflating: data/mesh_451.ply       \n","  inflating: data/mesh_452.ply       \n","  inflating: data/mesh_453.ply       \n","  inflating: data/mesh_454.ply       \n","  inflating: data/mesh_455.ply       \n","  inflating: data/mesh_456.ply       \n","  inflating: data/mesh_457.ply       \n","  inflating: data/mesh_459.ply       \n","  inflating: data/mesh_460.ply       \n","  inflating: data/mesh_462.ply       \n","  inflating: data/mesh_463.ply       \n","  inflating: data/mesh_464.ply       \n","  inflating: data/mesh_465.ply       \n","  inflating: data/mesh_466.ply       \n","  inflating: data/mesh_467.ply       \n","  inflating: data/mesh_468.ply       \n","  inflating: data/mesh_469.ply       
\n","  inflating: data/mesh_470.ply       \n","  inflating: data/mesh_472.ply       \n","  inflating: data/mesh_473.ply       \n","  inflating: data/mesh_474.ply       \n","  inflating: data/mesh_475.ply       \n","  inflating: data/mesh_476.ply       \n","  inflating: data/mesh_478.ply       \n","  inflating: data/mesh_479.ply       \n","  inflating: data/mesh_480.ply       \n","  inflating: data/mesh_482.ply       \n","  inflating: data/mesh_483.ply       \n","  inflating: data/mesh_486.ply       \n","  inflating: data/mesh_487.ply       \n","  inflating: data/mesh_488.ply       \n","  inflating: data/mesh_490.ply       \n","  inflating: data/mesh_493.ply       \n","  inflating: data/mesh_494.ply       \n","  inflating: data/mesh_495.ply       \n","  inflating: data/mesh_496.ply       \n","  inflating: data/mesh_497.ply       \n","  inflating: data/mesh_498.ply       \n","  inflating: data/mesh_499.ply       \n","  inflating: data/mesh_501.ply       \n","  inflating: data/mesh_502.ply       \n","  inflating: data/mesh_503.ply       \n","  inflating: data/mesh_504.ply       \n","  inflating: data/mesh_505.ply       \n","  inflating: data/mesh_507.ply       \n","  inflating: data/mesh_508.ply       \n","  inflating: data/mesh_509.ply       \n","  inflating: data/mesh_511.ply       \n","  inflating: data/mesh_512.ply       \n","  inflating: data/mesh_513.ply       \n","  inflating: data/mesh_514.ply       \n","  inflating: data/mesh_515.ply       \n","  inflating: data/mesh_516.ply       \n","  inflating: data/mesh_518.ply       \n","  inflating: data/mesh_519.ply       \n","  inflating: data/mesh_521.ply       \n","  inflating: data/mesh_522.ply       \n","  inflating: data/mesh_523.ply       \n","  inflating: data/mesh_524.ply       \n","  inflating: data/mesh_525.ply       \n","  inflating: data/mesh_527.ply       \n","  inflating: data/mesh_529.ply       \n","  inflating: data/mesh_530.ply       \n","  inflating: data/mesh_532.ply       \n","  inflating: 
data/mesh_533.ply       \n","  inflating: data/mesh_536.ply       \n","  inflating: data/mesh_538.ply       \n","  inflating: data/mesh_539.ply       \n","  inflating: data/mesh_540.ply       \n","  inflating: data/mesh_542.ply       \n","  inflating: data/mesh_543.ply       \n","  inflating: data/mesh_545.ply       \n","  inflating: data/mesh_547.ply       \n","  inflating: data/mesh_548.ply       \n","  inflating: data/mesh_549.ply       \n","  inflating: data/mesh_550.ply       \n","  inflating: data/mesh_551.ply       \n","  inflating: data/mesh_552.ply       \n","  inflating: data/mesh_553.ply       \n","  inflating: data/mesh_554.ply       \n","  inflating: data/mesh_555.ply       \n","  inflating: data/mesh_560.ply       \n","  inflating: data/mesh_561.ply       \n","  inflating: data/mesh_562.ply       \n","  inflating: data/mesh_564.ply       \n","  inflating: data/mesh_565.ply       \n","  inflating: data/mesh_566.ply       \n","  inflating: data/mesh_567.ply       \n","  inflating: data/mesh_568.ply       \n","  inflating: data/mesh_569.ply       \n","  inflating: data/mesh_572.ply       \n","  inflating: data/mesh_573.ply       \n","  inflating: data/mesh_574.ply       \n","  inflating: data/mesh_576.ply       \n","  inflating: data/mesh_577.ply       \n","  inflating: data/mesh_579.ply       \n","  inflating: data/mesh_581.ply       \n","  inflating: data/mesh_582.ply       \n","  inflating: data/mesh_583.ply       \n","  inflating: data/mesh_584.ply       \n","  inflating: data/mesh_587.ply       \n","  inflating: data/mesh_588.ply       \n","  inflating: data/mesh_589.ply       \n","  inflating: data/mesh_591.ply       \n","  inflating: data/mesh_593.ply       \n","  inflating: data/mesh_594.ply       \n","  inflating: data/mesh_595.ply       \n","  inflating: data/mesh_596.ply       \n","  inflating: data/mesh_597.ply       \n","  inflating: data/mesh_598.ply       \n","  inflating: data/mesh_600.ply       \n","  inflating: data/mesh_602.ply       
\n","  inflating: data/mesh_604.ply       \n","  inflating: data/mesh_608.ply       \n","  inflating: data/mesh_610.ply       \n","  inflating: data/mesh_611.ply       \n","  inflating: data/mesh_612.ply       \n","  inflating: data/mesh_613.ply       \n","  inflating: data/mesh_615.ply       \n","  inflating: data/mesh_616.ply       \n","  inflating: data/mesh_617.ply       \n","  inflating: data/mesh_618.ply       \n","  inflating: data/mesh_620.ply       \n","  inflating: data/mesh_621.ply       \n","  inflating: data/mesh_622.ply       \n","  inflating: data/mesh_623.ply       \n","  inflating: data/mesh_625.ply       \n","  inflating: data/mesh_626.ply       \n","  inflating: data/mesh_627.ply       \n","  inflating: data/mesh_628.ply       \n","  inflating: data/mesh_629.ply       \n","  inflating: data/mesh_630.ply       \n","  inflating: data/mesh_631.ply       \n","  inflating: data/mesh_632.ply       \n","  inflating: data/mesh_633.ply       \n","  inflating: data/mesh_634.ply       \n","  inflating: data/mesh_635.ply       \n","  inflating: data/mesh_636.ply       \n","  inflating: data/mesh_638.ply       \n","  inflating: data/mesh_639.ply       \n","  inflating: data/mesh_640.ply       \n","  inflating: data/mesh_641.ply       \n","  inflating: data/mesh_642.ply       \n","  inflating: data/mesh_643.ply       \n","  inflating: data/mesh_644.ply       \n","  inflating: data/mesh_645.ply       \n","  inflating: data/mesh_646.ply       \n","  inflating: data/mesh_647.ply       \n","  inflating: data/mesh_648.ply       \n","  inflating: data/mesh_649.ply       \n","  inflating: data/mesh_651.ply       \n","  inflating: data/mesh_652.ply       \n","  inflating: data/mesh_654.ply       \n","  inflating: data/mesh_655.ply       \n","  inflating: data/mesh_656.ply       \n","  inflating: data/mesh_657.ply       \n","  inflating: data/press_001.npy      \n","  inflating: data/press_002.npy      \n","  inflating: data/press_004.npy      \n","  inflating: 
data/press_005.npy      \n","  inflating: data/press_006.npy      \n","  inflating: data/press_007.npy      \n","  inflating: data/press_008.npy      \n","  inflating: data/press_010.npy      \n","  inflating: data/press_012.npy      \n","  inflating: data/press_013.npy      \n","  inflating: data/press_017.npy      \n","  inflating: data/press_018.npy      \n","  inflating: data/press_021.npy      \n","  inflating: data/press_022.npy      \n","  inflating: data/press_023.npy      \n","  inflating: data/press_025.npy      \n","  inflating: data/press_026.npy      \n","  inflating: data/press_027.npy      \n","  inflating: data/press_028.npy      \n","  inflating: data/press_029.npy      \n","  inflating: data/press_030.npy      \n","  inflating: data/press_031.npy      \n","  inflating: data/press_032.npy      \n","  inflating: data/press_034.npy      \n","  inflating: data/press_035.npy      \n","  inflating: data/press_039.npy      \n","  inflating: data/press_040.npy      \n","  inflating: data/press_043.npy      \n","  inflating: data/press_044.npy      \n","  inflating: data/press_045.npy      \n","  inflating: data/press_046.npy      \n","  inflating: data/press_047.npy      \n","  inflating: data/press_048.npy      \n","  inflating: data/press_049.npy      \n","  inflating: data/press_050.npy      \n","  inflating: data/press_051.npy      \n","  inflating: data/press_052.npy      \n","  inflating: data/press_054.npy      \n","  inflating: data/press_055.npy      \n","  inflating: data/press_056.npy      \n","  inflating: data/press_058.npy      \n","  inflating: data/press_059.npy      \n","  inflating: data/press_060.npy      \n","  inflating: data/press_061.npy      \n","  inflating: data/press_062.npy      \n","  inflating: data/press_063.npy      \n","  inflating: data/press_064.npy      \n","  inflating: data/press_065.npy      \n","  inflating: data/press_067.npy      \n","  inflating: data/press_069.npy      \n","  inflating: data/press_070.npy      
\n","  inflating: data/press_071.npy      \n","  inflating: data/press_072.npy      \n","  inflating: data/press_073.npy      \n","  inflating: data/press_074.npy      \n","  inflating: data/press_075.npy      \n","  inflating: data/press_076.npy      \n","  inflating: data/press_077.npy      \n","  inflating: data/press_078.npy      \n","  inflating: data/press_079.npy      \n","  inflating: data/press_080.npy      \n","  inflating: data/press_081.npy      \n","  inflating: data/press_083.npy      \n","  inflating: data/press_084.npy      \n","  inflating: data/press_085.npy      \n","  inflating: data/press_086.npy      \n","  inflating: data/press_087.npy      \n","  inflating: data/press_088.npy      \n","  inflating: data/press_090.npy      \n","  inflating: data/press_091.npy      \n","  inflating: data/press_092.npy      \n","  inflating: data/press_094.npy      \n","  inflating: data/press_095.npy      \n","  inflating: data/press_096.npy      \n","  inflating: data/press_097.npy      \n","  inflating: data/press_100.npy      \n","  inflating: data/press_101.npy      \n","  inflating: data/press_102.npy      \n","  inflating: data/press_105.npy      \n","  inflating: data/press_106.npy      \n","  inflating: data/press_107.npy      \n","  inflating: data/press_109.npy      \n","  inflating: data/press_110.npy      \n","  inflating: data/press_111.npy      \n","  inflating: data/press_112.npy      \n","  inflating: data/press_113.npy      \n","  inflating: data/press_114.npy      \n","  inflating: data/press_115.npy      \n","  inflating: data/press_116.npy      \n","  inflating: data/press_117.npy      \n","  inflating: data/press_118.npy      \n","  inflating: data/press_119.npy      \n","  inflating: data/press_120.npy      \n","  inflating: data/press_121.npy      \n","  inflating: data/press_123.npy      \n","  inflating: data/press_124.npy      \n","  inflating: data/press_125.npy      \n","  inflating: data/press_126.npy      \n","  inflating: 
data/press_127.npy      \n","  inflating: data/press_128.npy      \n","  inflating: data/press_129.npy      \n","  inflating: data/press_130.npy      \n","  inflating: data/press_131.npy      \n","  inflating: data/press_133.npy      \n","  inflating: data/press_134.npy      \n","  inflating: data/press_136.npy      \n","  inflating: data/press_137.npy      \n","  inflating: data/press_138.npy      \n","  inflating: data/press_139.npy      \n","  inflating: data/press_140.npy      \n","  inflating: data/press_141.npy      \n","  inflating: data/press_142.npy      \n","  inflating: data/press_143.npy      \n","  inflating: data/press_144.npy      \n","  inflating: data/press_145.npy      \n","  inflating: data/press_146.npy      \n","  inflating: data/press_147.npy      \n","  inflating: data/press_148.npy      \n","  inflating: data/press_149.npy      \n","  inflating: data/press_150.npy      \n","  inflating: data/press_151.npy      \n","  inflating: data/press_152.npy      \n","  inflating: data/press_153.npy      \n","  inflating: data/press_155.npy      \n","  inflating: data/press_156.npy      \n","  inflating: data/press_157.npy      \n","  inflating: data/press_158.npy      \n","  inflating: data/press_159.npy      \n","  inflating: data/press_160.npy      \n","  inflating: data/press_161.npy      \n","  inflating: data/press_162.npy      \n","  inflating: data/press_163.npy      \n","  inflating: data/press_165.npy      \n","  inflating: data/press_166.npy      \n","  inflating: data/press_170.npy      \n","  inflating: data/press_172.npy      \n","  inflating: data/press_173.npy      \n","  inflating: data/press_175.npy      \n","  inflating: data/press_176.npy      \n","  inflating: data/press_177.npy      \n","  inflating: data/press_178.npy      \n","  inflating: data/press_179.npy      \n","  inflating: data/press_180.npy      \n","  inflating: data/press_181.npy      \n","  inflating: data/press_182.npy      \n","  inflating: data/press_183.npy      
\n","  inflating: data/press_184.npy      \n","  inflating: data/press_186.npy      \n","  inflating: data/press_190.npy      \n","  inflating: data/press_191.npy      \n","  inflating: data/press_192.npy      \n","  inflating: data/press_193.npy      \n","  inflating: data/press_195.npy      \n","  inflating: data/press_196.npy      \n","  inflating: data/press_198.npy      \n","  inflating: data/press_199.npy      \n","  inflating: data/press_200.npy      \n","  inflating: data/press_201.npy      \n","  inflating: data/press_202.npy      \n","  inflating: data/press_203.npy      \n","  inflating: data/press_205.npy      \n","  inflating: data/press_207.npy      \n","  inflating: data/press_210.npy      \n","  inflating: data/press_211.npy      \n","  inflating: data/press_212.npy      \n","  inflating: data/press_213.npy      \n","  inflating: data/press_214.npy      \n","  inflating: data/press_215.npy      \n","  inflating: data/press_217.npy      \n","  inflating: data/press_219.npy      \n","  inflating: data/press_220.npy      \n","  inflating: data/press_221.npy      \n","  inflating: data/press_222.npy      \n","  inflating: data/press_223.npy      \n","  inflating: data/press_224.npy      \n","  inflating: data/press_225.npy      \n","  inflating: data/press_227.npy      \n","  inflating: data/press_228.npy      \n","  inflating: data/press_229.npy      \n","  inflating: data/press_230.npy      \n","  inflating: data/press_231.npy      \n","  inflating: data/press_232.npy      \n","  inflating: data/press_233.npy      \n","  inflating: data/press_234.npy      \n","  inflating: data/press_235.npy      \n","  inflating: data/press_236.npy      \n","  inflating: data/press_237.npy      \n","  inflating: data/press_241.npy      \n","  inflating: data/press_243.npy      \n","  inflating: data/press_244.npy      \n","  inflating: data/press_245.npy      \n","  inflating: data/press_246.npy      \n","  inflating: data/press_247.npy      \n","  inflating: 
data/press_248.npy      \n","  inflating: data/press_249.npy      \n","  inflating: data/press_251.npy      \n","  inflating: data/press_252.npy      \n","  inflating: data/press_253.npy      \n","  inflating: data/press_255.npy      \n","  inflating: data/press_257.npy      \n","  inflating: data/press_258.npy      \n","  inflating: data/press_259.npy      \n","  inflating: data/press_260.npy      \n","  inflating: data/press_261.npy      \n","  inflating: data/press_262.npy      \n","  inflating: data/press_263.npy      \n","  inflating: data/press_264.npy      \n","  inflating: data/press_266.npy      \n","  inflating: data/press_267.npy      \n","  inflating: data/press_268.npy      \n","  inflating: data/press_269.npy      \n","  inflating: data/press_271.npy      \n","  inflating: data/press_272.npy      \n","  inflating: data/press_273.npy      \n","  inflating: data/press_274.npy      \n","  inflating: data/press_275.npy      \n","  inflating: data/press_276.npy      \n","  inflating: data/press_277.npy      \n","  inflating: data/press_278.npy      \n","  inflating: data/press_279.npy      \n","  inflating: data/press_280.npy      \n","  inflating: data/press_281.npy      \n","  inflating: data/press_282.npy      \n","  inflating: data/press_283.npy      \n","  inflating: data/press_285.npy      \n","  inflating: data/press_286.npy      \n","  inflating: data/press_289.npy      \n","  inflating: data/press_290.npy      \n","  inflating: data/press_291.npy      \n","  inflating: data/press_292.npy      \n","  inflating: data/press_293.npy      \n","  inflating: data/press_294.npy      \n","  inflating: data/press_295.npy      \n","  inflating: data/press_296.npy      \n","  inflating: data/press_297.npy      \n","  inflating: data/press_298.npy      \n","  inflating: data/press_299.npy      \n","  inflating: data/press_300.npy      \n","  inflating: data/press_301.npy      \n","  inflating: data/press_302.npy      \n","  inflating: data/press_304.npy      
\n","  inflating: data/press_305.npy      \n","  inflating: data/press_306.npy      \n","  inflating: data/press_308.npy      \n","  inflating: data/press_309.npy      \n","  inflating: data/press_310.npy      \n","  inflating: data/press_311.npy      \n","  inflating: data/press_312.npy      \n","  inflating: data/press_313.npy      \n","  inflating: data/press_314.npy      \n","  inflating: data/press_315.npy      \n","  inflating: data/press_319.npy      \n","  inflating: data/press_320.npy      \n","  inflating: data/press_321.npy      \n","  inflating: data/press_322.npy      \n","  inflating: data/press_323.npy      \n","  inflating: data/press_324.npy      \n","  inflating: data/press_325.npy      \n","  inflating: data/press_327.npy      \n","  inflating: data/press_328.npy      \n","  inflating: data/press_329.npy      \n","  inflating: data/press_331.npy      \n","  inflating: data/press_332.npy      \n","  inflating: data/press_333.npy      \n","  inflating: data/press_334.npy      \n","  inflating: data/press_335.npy      \n","  inflating: data/press_337.npy      \n","  inflating: data/press_338.npy      \n","  inflating: data/press_339.npy      \n","  inflating: data/press_340.npy      \n","  inflating: data/press_341.npy      \n","  inflating: data/press_344.npy      \n","  inflating: data/press_345.npy      \n","  inflating: data/press_347.npy      \n","  inflating: data/press_348.npy      \n","  inflating: data/press_349.npy      \n","  inflating: data/press_350.npy      \n","  inflating: data/press_352.npy      \n","  inflating: data/press_353.npy      \n","  inflating: data/press_354.npy      \n","  inflating: data/press_355.npy      \n","  inflating: data/press_356.npy      \n","  inflating: data/press_357.npy      \n","  inflating: data/press_358.npy      \n","  inflating: data/press_360.npy      \n","  inflating: data/press_362.npy      \n","  inflating: data/press_364.npy      \n","  inflating: data/press_365.npy      \n","  inflating: 
data/press_366.npy      \n","  inflating: data/press_367.npy      \n","  inflating: data/press_369.npy      \n","  inflating: data/press_371.npy      \n","  inflating: data/press_372.npy      \n","  inflating: data/press_373.npy      \n","  inflating: data/press_374.npy      \n","  inflating: data/press_375.npy      \n","  inflating: data/press_376.npy      \n","  inflating: data/press_378.npy      \n","  inflating: data/press_379.npy      \n","  inflating: data/press_380.npy      \n","  inflating: data/press_381.npy      \n","  inflating: data/press_384.npy      \n","  inflating: data/press_385.npy      \n","  inflating: data/press_389.npy      \n","  inflating: data/press_392.npy      \n","  inflating: data/press_393.npy      \n","  inflating: data/press_397.npy      \n","  inflating: data/press_398.npy      \n","  inflating: data/press_399.npy      \n","  inflating: data/press_401.npy      \n","  inflating: data/press_402.npy      \n","  inflating: data/press_403.npy      \n","  inflating: data/press_404.npy      \n","  inflating: data/press_405.npy      \n","  inflating: data/press_407.npy      \n","  inflating: data/press_408.npy      \n","  inflating: data/press_410.npy      \n","  inflating: data/press_412.npy      \n","  inflating: data/press_413.npy      \n","  inflating: data/press_414.npy      \n","  inflating: data/press_415.npy      \n","  inflating: data/press_417.npy      \n","  inflating: data/press_418.npy      \n","  inflating: data/press_419.npy      \n","  inflating: data/press_420.npy      \n","  inflating: data/press_422.npy      \n","  inflating: data/press_424.npy      \n","  inflating: data/press_425.npy      \n","  inflating: data/press_427.npy      \n","  inflating: data/press_430.npy      \n","  inflating: data/press_431.npy      \n","  inflating: data/press_433.npy      \n","  inflating: data/press_435.npy      \n","  inflating: data/press_436.npy      \n","  inflating: data/press_437.npy      \n","  inflating: data/press_439.npy      
\n","  inflating: data/press_440.npy      \n","  inflating: data/press_443.npy      \n","  inflating: data/press_444.npy      \n","  inflating: data/press_446.npy      \n","  inflating: data/press_447.npy      \n","  inflating: data/press_448.npy      \n","  inflating: data/press_449.npy      \n","  inflating: data/press_450.npy      \n","  inflating: data/press_451.npy      \n","  inflating: data/press_452.npy      \n","  inflating: data/press_453.npy      \n","  inflating: data/press_454.npy      \n","  inflating: data/press_455.npy      \n","  inflating: data/press_456.npy      \n","  inflating: data/press_457.npy      \n","  inflating: data/press_459.npy      \n","  inflating: data/press_460.npy      \n","  inflating: data/press_462.npy      \n","  inflating: data/press_463.npy      \n","  inflating: data/press_464.npy      \n","  inflating: data/press_465.npy      \n","  inflating: data/press_466.npy      \n","  inflating: data/press_467.npy      \n","  inflating: data/press_468.npy      \n","  inflating: data/press_469.npy      \n","  inflating: data/press_470.npy      \n","  inflating: data/press_472.npy      \n","  inflating: data/press_473.npy      \n","  inflating: data/press_474.npy      \n","  inflating: data/press_475.npy      \n","  inflating: data/press_476.npy      \n","  inflating: data/press_478.npy      \n","  inflating: data/press_479.npy      \n","  inflating: data/press_480.npy      \n","  inflating: data/press_482.npy      \n","  inflating: data/press_483.npy      \n","  inflating: data/press_486.npy      \n","  inflating: data/press_487.npy      \n","  inflating: data/press_488.npy      \n","  inflating: data/press_490.npy      \n","  inflating: data/press_493.npy      \n","  inflating: data/press_494.npy      \n","  inflating: data/press_495.npy      \n","  inflating: data/press_496.npy      \n","  inflating: data/press_497.npy      \n","  inflating: data/press_498.npy      \n","  inflating: data/press_499.npy      \n","  inflating: 
data/press_501.npy      \n","  inflating: data/press_502.npy      \n","  inflating: data/press_503.npy      \n","  inflating: data/press_504.npy      \n","  inflating: data/press_505.npy      \n","  inflating: data/press_507.npy      \n","  inflating: data/press_508.npy      \n","  inflating: data/press_509.npy      \n","  inflating: data/press_511.npy      \n","  inflating: data/press_512.npy      \n","  inflating: data/press_513.npy      \n","  inflating: data/press_514.npy      \n","  inflating: data/press_515.npy      \n","  inflating: data/press_516.npy      \n","  inflating: data/press_518.npy      \n","  inflating: data/press_519.npy      \n","  inflating: data/press_521.npy      \n","  inflating: data/press_522.npy      \n","  inflating: data/press_523.npy      \n","  inflating: data/press_524.npy      \n","  inflating: data/press_525.npy      \n","  inflating: data/press_527.npy      \n","  inflating: data/press_529.npy      \n","  inflating: data/press_530.npy      \n","  inflating: data/press_532.npy      \n","  inflating: data/press_533.npy      \n","  inflating: data/press_536.npy      \n","  inflating: data/press_538.npy      \n","  inflating: data/press_539.npy      \n","  inflating: data/press_540.npy      \n","  inflating: data/press_542.npy      \n","  inflating: data/press_543.npy      \n","  inflating: data/press_545.npy      \n","  inflating: data/press_547.npy      \n","  inflating: data/press_548.npy      \n","  inflating: data/press_549.npy      \n","  inflating: data/press_550.npy      \n","  inflating: data/press_551.npy      \n","  inflating: data/press_552.npy      \n","  inflating: data/press_553.npy      \n","  inflating: data/press_554.npy      \n","  inflating: data/press_555.npy      \n","  inflating: data/press_560.npy      \n","  inflating: data/press_561.npy      \n","  inflating: data/press_562.npy      \n","  inflating: data/press_564.npy      \n","  inflating: data/press_565.npy      \n","  inflating: data/press_566.npy      
\n","  inflating: data/press_567.npy      \n","  inflating: data/press_568.npy      \n","  inflating: data/press_569.npy      \n","  inflating: data/press_572.npy      \n","  inflating: data/press_573.npy      \n","  inflating: data/press_574.npy      \n","  inflating: data/press_576.npy      \n","  inflating: data/press_577.npy      \n","  inflating: data/press_579.npy      \n","  inflating: data/press_581.npy      \n","  inflating: data/press_582.npy      \n","  inflating: data/press_583.npy      \n","  inflating: data/press_584.npy      \n","  inflating: data/press_587.npy      \n","  inflating: data/press_588.npy      \n","  inflating: data/press_589.npy      \n","  inflating: data/press_591.npy      \n","  inflating: data/press_593.npy      \n","  inflating: data/press_594.npy      \n","  inflating: data/press_595.npy      \n","  inflating: data/press_596.npy      \n","  inflating: data/press_597.npy      \n","  inflating: data/press_598.npy      \n","  inflating: data/press_600.npy      \n","  inflating: data/press_602.npy      \n","  inflating: data/press_604.npy      \n","  inflating: data/press_608.npy      \n","  inflating: data/press_610.npy      \n","  inflating: data/press_611.npy      \n","  inflating: data/press_612.npy      \n","  inflating: data/press_613.npy      \n","  inflating: data/press_615.npy      \n","  inflating: data/press_616.npy      \n","  inflating: data/press_617.npy      \n","  inflating: data/press_618.npy      \n","  inflating: data/press_620.npy      \n","  inflating: data/press_621.npy      \n","  inflating: data/press_622.npy      \n","  inflating: data/press_623.npy      \n","  inflating: data/press_625.npy      \n","  inflating: data/press_626.npy      \n","  inflating: data/press_627.npy      \n","  inflating: data/press_628.npy      \n","  inflating: data/press_629.npy      \n","  inflating: data/press_630.npy      \n","  inflating: data/press_631.npy      \n","  inflating: data/press_632.npy      \n","  inflating: 
data/press_633.npy      \n","  inflating: data/press_634.npy      \n","  inflating: data/press_635.npy      \n","  inflating: data/press_636.npy      \n","  inflating: data/press_638.npy      \n","  inflating: data/press_639.npy      \n","  inflating: data/press_640.npy      \n","  inflating: data/press_641.npy      \n","  inflating: data/press_642.npy      \n","  inflating: data/press_643.npy      \n","  inflating: data/press_644.npy      \n","  inflating: data/press_645.npy      \n","  inflating: data/press_646.npy      \n","  inflating: data/press_647.npy      \n","  inflating: data/press_648.npy      \n","  inflating: data/press_649.npy      \n","  inflating: data/press_651.npy      \n","  inflating: data/press_652.npy      \n","  inflating: data/press_654.npy      \n","  inflating: data/press_655.npy      \n","  inflating: data/press_656.npy      \n","  inflating: data/press_657.npy      \n"," extracting: data/train_pressure_min_std.txt  \n","  inflating: data/watertight_global_bounds.txt  \n","  inflating: data/watertight_meshes.txt  \n","Archive:  track_A.zip\n","  inflating: track_A/mesh_658.ply    \n","  inflating: track_A/mesh_659.ply    \n","  inflating: track_A/mesh_660.ply    \n","  inflating: track_A/mesh_661.ply    \n","  inflating: track_A/mesh_662.ply    \n","  inflating: track_A/mesh_663.ply    \n","  inflating: track_A/mesh_664.ply    \n","  inflating: track_A/mesh_665.ply    \n","  inflating: track_A/mesh_666.ply    \n","  inflating: track_A/mesh_667.ply    \n","  inflating: track_A/mesh_668.ply    \n","  inflating: track_A/mesh_669.ply    \n","  inflating: track_A/mesh_670.ply    \n","  inflating: track_A/mesh_671.ply    \n","  inflating: track_A/mesh_672.ply    \n","  inflating: track_A/mesh_673.ply    \n","  inflating: track_A/mesh_674.ply    \n","  inflating: track_A/mesh_675.ply    \n","  inflating: track_A/mesh_676.ply    \n","  inflating: track_A/mesh_677.ply    \n","  inflating: track_A/mesh_678.ply    \n","  inflating: 
track_A/mesh_679.ply    \n","  inflating: track_A/mesh_680.ply    \n","  inflating: track_A/mesh_681.ply    \n","  inflating: track_A/mesh_682.ply    \n","  inflating: track_A/mesh_683.ply    \n","  inflating: track_A/mesh_684.ply    \n","  inflating: track_A/mesh_685.ply    \n","  inflating: track_A/mesh_686.ply    \n","  inflating: track_A/mesh_687.ply    \n","  inflating: track_A/mesh_688.ply    \n","  inflating: track_A/mesh_689.ply    \n","  inflating: track_A/mesh_690.ply    \n","  inflating: track_A/mesh_691.ply    \n","  inflating: track_A/mesh_692.ply    \n","  inflating: track_A/mesh_693.ply    \n","  inflating: track_A/mesh_694.ply    \n","  inflating: track_A/mesh_695.ply    \n","  inflating: track_A/mesh_696.ply    \n","  inflating: track_A/mesh_697.ply    \n","  inflating: track_A/mesh_698.ply    \n","  inflating: track_A/mesh_699.ply    \n","  inflating: track_A/mesh_700.ply    \n","  inflating: track_A/mesh_701.ply    \n","  inflating: track_A/mesh_702.ply    \n","  inflating: track_A/mesh_703.ply    \n","  inflating: track_A/mesh_704.ply    \n","  inflating: track_A/mesh_705.ply    \n","  inflating: track_A/mesh_706.ply    \n","  inflating: track_A/mesh_707.ply    \n","  inflating: track_A/mesh_708.ply    \n","  inflating: track_A/mesh_709.ply    \n","  inflating: track_A/mesh_710.ply    \n","  inflating: track_A/mesh_711.ply    \n","  inflating: track_A/mesh_712.ply    \n","  inflating: track_A/mesh_713.ply    \n","  inflating: track_A/mesh_714.ply    \n","  inflating: track_A/mesh_715.ply    \n","  inflating: track_A/mesh_716.ply    \n","  inflating: track_A/mesh_717.ply    \n","  inflating: track_A/mesh_718.ply    \n","  inflating: track_A/mesh_719.ply    \n","  inflating: track_A/mesh_720.ply    \n","  inflating: track_A/mesh_721.ply    \n","  inflating: track_A/mesh_722.ply    \n","  inflating: track_A/watertight_meshes.txt  \n"]}],"source":["!rm -rf data/*\n","!mkdir -p data && unzip -o train_data.zip\n","!mkdir -p track_A && unzip -o 
track_A.zip\n","!mkdir -p train_track_B && unzip -o train_track_B.zip -d data_track_B/\n","!mkdir -p track_B && unzip -o track_B.zip\n","!mkdir track_B_vtk\n","!mkdir track_A_vtk\n","!mkdir data_centroid_track_B_vtk\n","!mkdir data_vtk\n","!mkdir data_track_C_vtk_preprocessed_data"]},{"cell_type":"markdown","metadata":{"id":"sRLfyacGYGcY"},"source":["## 通过requirements.txt一次性导入"]},{"cell_type":"code","execution_count":null,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"collapsed":true,"executionInfo":{"elapsed":1034455,"status":"ok","timestamp":1720670647901,"user":{"displayName":"pei jian zeng","userId":"06013928868849686113"},"user_tz":-480},"id":"qyojoHPfz0og","outputId":"f8de571b-96a5-412f-dcfc-541aca883b6a"},"outputs":[{"name":"stdout","output_type":"stream","text":["Requirement already satisfied: torch_geometric==2.5.3 in /usr/local/lib/python3.10/dist-packages (2.5.3)\n","Requirement already satisfied: tqdm in /usr/local/lib/python3.10/dist-packages (from torch_geometric==2.5.3) (4.66.4)\n","Requirement already satisfied: numpy in /usr/local/lib/python3.10/dist-packages (from torch_geometric==2.5.3) (1.24.4)\n","Requirement already satisfied: scipy in /usr/local/lib/python3.10/dist-packages (from torch_geometric==2.5.3) (1.8.0)\n","Requirement already satisfied: fsspec in /usr/local/lib/python3.10/dist-packages (from torch_geometric==2.5.3) (2023.6.0)\n","Requirement already satisfied: jinja2 in /usr/local/lib/python3.10/dist-packages (from torch_geometric==2.5.3) (3.1.4)\n","Requirement already satisfied: aiohttp in /usr/local/lib/python3.10/dist-packages (from torch_geometric==2.5.3) (3.9.5)\n","Requirement already satisfied: requests in /usr/local/lib/python3.10/dist-packages (from torch_geometric==2.5.3) (2.31.0)\n","Requirement already satisfied: pyparsing in /usr/local/lib/python3.10/dist-packages (from torch_geometric==2.5.3) (3.1.2)\n","Requirement already satisfied: scikit-learn in /usr/local/lib/python3.10/dist-packages (from 
torch_geometric==2.5.3) (1.2.2)\n","Requirement already satisfied: psutil>=5.8.0 in /usr/local/lib/python3.10/dist-packages (from torch_geometric==2.5.3) (5.9.5)\n","Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.10/dist-packages (from aiohttp->torch_geometric==2.5.3) (1.3.1)\n","Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->torch_geometric==2.5.3) (23.2.0)\n","Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from aiohttp->torch_geometric==2.5.3) (1.4.1)\n","Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.10/dist-packages (from aiohttp->torch_geometric==2.5.3) (6.0.5)\n","Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->torch_geometric==2.5.3) (1.9.4)\n","Requirement already satisfied: async-timeout<5.0,>=4.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->torch_geometric==2.5.3) (4.0.3)\n","Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.10/dist-packages (from jinja2->torch_geometric==2.5.3) (2.1.5)\n","Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests->torch_geometric==2.5.3) (3.3.2)\n","Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests->torch_geometric==2.5.3) (3.7)\n","Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests->torch_geometric==2.5.3) (2.0.7)\n","Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests->torch_geometric==2.5.3) (2024.6.2)\n","Requirement already satisfied: joblib>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from scikit-learn->torch_geometric==2.5.3) (1.4.2)\n","Requirement already satisfied: threadpoolctl>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from 
scikit-learn->torch_geometric==2.5.3) (3.5.0)\n","Requirement already satisfied: vtk==9.3.0 in /usr/local/lib/python3.10/dist-packages (9.3.0)\n","Requirement already satisfied: matplotlib>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from vtk==9.3.0) (3.7.1)\n","Requirement already satisfied: contourpy>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from matplotlib>=2.0.0->vtk==9.3.0) (1.2.1)\n","Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.10/dist-packages (from matplotlib>=2.0.0->vtk==9.3.0) (0.12.1)\n","Requirement already satisfied: fonttools>=4.22.0 in /usr/local/lib/python3.10/dist-packages (from matplotlib>=2.0.0->vtk==9.3.0) (4.53.0)\n","Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from matplotlib>=2.0.0->vtk==9.3.0) (1.4.5)\n","Requirement already satisfied: numpy>=1.20 in /usr/local/lib/python3.10/dist-packages (from matplotlib>=2.0.0->vtk==9.3.0) (1.24.4)\n","Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.10/dist-packages (from matplotlib>=2.0.0->vtk==9.3.0) (24.1)\n","Requirement already satisfied: pillow>=6.2.0 in /usr/local/lib/python3.10/dist-packages (from matplotlib>=2.0.0->vtk==9.3.0) (9.4.0)\n","Requirement already satisfied: pyparsing>=2.3.1 in /usr/local/lib/python3.10/dist-packages (from matplotlib>=2.0.0->vtk==9.3.0) (3.1.2)\n","Requirement already satisfied: python-dateutil>=2.7 in /usr/local/lib/python3.10/dist-packages (from matplotlib>=2.0.0->vtk==9.3.0) (2.8.2)\n","Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.10/dist-packages (from python-dateutil>=2.7->matplotlib>=2.0.0->vtk==9.3.0) (1.16.0)\n","Requirement already satisfied: open3d==0.17.0 in /usr/local/lib/python3.10/dist-packages (0.17.0)\n","Requirement already satisfied: numpy>=1.18.0 in /usr/local/lib/python3.10/dist-packages (from open3d==0.17.0) (1.24.4)\n","Requirement already satisfied: dash>=2.6.0 in /usr/local/lib/python3.10/dist-packages (from 
open3d==0.17.0) (2.17.1)\n","Requirement already satisfied: werkzeug>=2.2.3 in /usr/local/lib/python3.10/dist-packages (from open3d==0.17.0) (3.0.3)\n","Requirement already satisfied: nbformat==5.7.0 in /usr/local/lib/python3.10/dist-packages (from open3d==0.17.0) (5.7.0)\n","Requirement already satisfied: configargparse in /usr/local/lib/python3.10/dist-packages (from open3d==0.17.0) (1.7)\n","Requirement already satisfied: ipywidgets>=8.0.4 in /usr/local/lib/python3.10/dist-packages (from open3d==0.17.0) (8.1.3)\n","Requirement already satisfied: addict in /usr/local/lib/python3.10/dist-packages (from open3d==0.17.0) (2.4.0)\n","Requirement already satisfied: pillow>=9.3.0 in /usr/local/lib/python3.10/dist-packages (from open3d==0.17.0) (9.4.0)\n","Requirement already satisfied: matplotlib>=3 in /usr/local/lib/python3.10/dist-packages (from open3d==0.17.0) (3.7.1)\n","Requirement already satisfied: pandas>=1.0 in /usr/local/lib/python3.10/dist-packages (from open3d==0.17.0) (2.2.2)\n","Requirement already satisfied: pyyaml>=5.4.1 in /usr/local/lib/python3.10/dist-packages (from open3d==0.17.0) (6.0.1)\n","Requirement already satisfied: scikit-learn>=0.21 in /usr/local/lib/python3.10/dist-packages (from open3d==0.17.0) (1.2.2)\n","Requirement already satisfied: tqdm in /usr/local/lib/python3.10/dist-packages (from open3d==0.17.0) (4.66.4)\n","Requirement already satisfied: pyquaternion in /usr/local/lib/python3.10/dist-packages (from open3d==0.17.0) (0.9.9)\n","Requirement already satisfied: fastjsonschema in /usr/local/lib/python3.10/dist-packages (from nbformat==5.7.0->open3d==0.17.0) (2.20.0)\n","Requirement already satisfied: jsonschema>=2.6 in /usr/local/lib/python3.10/dist-packages (from nbformat==5.7.0->open3d==0.17.0) (4.19.2)\n","Requirement already satisfied: jupyter-core in /usr/local/lib/python3.10/dist-packages (from nbformat==5.7.0->open3d==0.17.0) (5.7.2)\n","Requirement already satisfied: traitlets>=5.1 in /usr/local/lib/python3.10/dist-packages 
(from nbformat==5.7.0->open3d==0.17.0) (5.7.1)\n","Requirement already satisfied: Flask<3.1,>=1.0.4 in /usr/local/lib/python3.10/dist-packages (from dash>=2.6.0->open3d==0.17.0) (2.2.5)\n","Requirement already satisfied: plotly>=5.0.0 in /usr/local/lib/python3.10/dist-packages (from dash>=2.6.0->open3d==0.17.0) (5.15.0)\n","Requirement already satisfied: dash-html-components==2.0.0 in /usr/local/lib/python3.10/dist-packages (from dash>=2.6.0->open3d==0.17.0) (2.0.0)\n","Requirement already satisfied: dash-core-components==2.0.0 in /usr/local/lib/python3.10/dist-packages (from dash>=2.6.0->open3d==0.17.0) (2.0.0)\n","Requirement already satisfied: dash-table==5.0.0 in /usr/local/lib/python3.10/dist-packages (from dash>=2.6.0->open3d==0.17.0) (5.0.0)\n","Requirement already satisfied: importlib-metadata in /usr/local/lib/python3.10/dist-packages (from dash>=2.6.0->open3d==0.17.0) (8.0.0)\n","Requirement already satisfied: typing-extensions>=4.1.1 in /usr/local/lib/python3.10/dist-packages (from dash>=2.6.0->open3d==0.17.0) (4.12.2)\n","Requirement already satisfied: requests in /usr/local/lib/python3.10/dist-packages (from dash>=2.6.0->open3d==0.17.0) (2.31.0)\n","Requirement already satisfied: retrying in /usr/local/lib/python3.10/dist-packages (from dash>=2.6.0->open3d==0.17.0) (1.3.4)\n","Requirement already satisfied: nest-asyncio in /usr/local/lib/python3.10/dist-packages (from dash>=2.6.0->open3d==0.17.0) (1.6.0)\n","Requirement already satisfied: setuptools in /usr/local/lib/python3.10/dist-packages (from dash>=2.6.0->open3d==0.17.0) (67.7.2)\n","Requirement already satisfied: comm>=0.1.3 in /usr/local/lib/python3.10/dist-packages (from ipywidgets>=8.0.4->open3d==0.17.0) (0.2.2)\n","Requirement already satisfied: ipython>=6.1.0 in /usr/local/lib/python3.10/dist-packages (from ipywidgets>=8.0.4->open3d==0.17.0) (7.34.0)\n","Requirement already satisfied: widgetsnbextension~=4.0.11 in /usr/local/lib/python3.10/dist-packages (from 
ipywidgets>=8.0.4->open3d==0.17.0) (4.0.11)\n","Requirement already satisfied: jupyterlab-widgets~=3.0.11 in /usr/local/lib/python3.10/dist-packages (from ipywidgets>=8.0.4->open3d==0.17.0) (3.0.11)\n","Requirement already satisfied: contourpy>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from matplotlib>=3->open3d==0.17.0) (1.2.1)\n","Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.10/dist-packages (from matplotlib>=3->open3d==0.17.0) (0.12.1)\n","Requirement already satisfied: fonttools>=4.22.0 in /usr/local/lib/python3.10/dist-packages (from matplotlib>=3->open3d==0.17.0) (4.53.0)\n","Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from matplotlib>=3->open3d==0.17.0) (1.4.5)\n","Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.10/dist-packages (from matplotlib>=3->open3d==0.17.0) (24.1)\n","Requirement already satisfied: pyparsing>=2.3.1 in /usr/local/lib/python3.10/dist-packages (from matplotlib>=3->open3d==0.17.0) (3.1.2)\n","Requirement already satisfied: python-dateutil>=2.7 in /usr/local/lib/python3.10/dist-packages (from matplotlib>=3->open3d==0.17.0) (2.8.2)\n","Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.10/dist-packages (from pandas>=1.0->open3d==0.17.0) (2023.4)\n","Requirement already satisfied: tzdata>=2022.7 in /usr/local/lib/python3.10/dist-packages (from pandas>=1.0->open3d==0.17.0) (2024.1)\n","Requirement already satisfied: scipy>=1.3.2 in /usr/local/lib/python3.10/dist-packages (from scikit-learn>=0.21->open3d==0.17.0) (1.8.0)\n","Requirement already satisfied: joblib>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from scikit-learn>=0.21->open3d==0.17.0) (1.4.2)\n","Requirement already satisfied: threadpoolctl>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from scikit-learn>=0.21->open3d==0.17.0) (3.5.0)\n","Requirement already satisfied: MarkupSafe>=2.1.1 in /usr/local/lib/python3.10/dist-packages (from 
werkzeug>=2.2.3->open3d==0.17.0) (2.1.5)\n","Requirement already satisfied: Jinja2>=3.0 in /usr/local/lib/python3.10/dist-packages (from Flask<3.1,>=1.0.4->dash>=2.6.0->open3d==0.17.0) (3.1.4)\n","Requirement already satisfied: itsdangerous>=2.0 in /usr/local/lib/python3.10/dist-packages (from Flask<3.1,>=1.0.4->dash>=2.6.0->open3d==0.17.0) (2.2.0)\n","Requirement already satisfied: click>=8.0 in /usr/local/lib/python3.10/dist-packages (from Flask<3.1,>=1.0.4->dash>=2.6.0->open3d==0.17.0) (8.1.7)\n","Requirement already satisfied: jedi>=0.16 in /usr/local/lib/python3.10/dist-packages (from ipython>=6.1.0->ipywidgets>=8.0.4->open3d==0.17.0) (0.19.1)\n","Requirement already satisfied: decorator in /usr/local/lib/python3.10/dist-packages (from ipython>=6.1.0->ipywidgets>=8.0.4->open3d==0.17.0) (4.4.2)\n","Requirement already satisfied: pickleshare in /usr/local/lib/python3.10/dist-packages (from ipython>=6.1.0->ipywidgets>=8.0.4->open3d==0.17.0) (0.7.5)\n","Requirement already satisfied: prompt-toolkit!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from ipython>=6.1.0->ipywidgets>=8.0.4->open3d==0.17.0) (3.0.47)\n","Requirement already satisfied: pygments in /usr/local/lib/python3.10/dist-packages (from ipython>=6.1.0->ipywidgets>=8.0.4->open3d==0.17.0) (2.16.1)\n","Requirement already satisfied: backcall in /usr/local/lib/python3.10/dist-packages (from ipython>=6.1.0->ipywidgets>=8.0.4->open3d==0.17.0) (0.2.0)\n","Requirement already satisfied: matplotlib-inline in /usr/local/lib/python3.10/dist-packages (from ipython>=6.1.0->ipywidgets>=8.0.4->open3d==0.17.0) (0.1.7)\n","Requirement already satisfied: pexpect>4.3 in /usr/local/lib/python3.10/dist-packages (from ipython>=6.1.0->ipywidgets>=8.0.4->open3d==0.17.0) (4.9.0)\n","Requirement already satisfied: attrs>=22.2.0 in /usr/local/lib/python3.10/dist-packages (from jsonschema>=2.6->nbformat==5.7.0->open3d==0.17.0) (23.2.0)\n","Requirement already satisfied: 
jsonschema-specifications>=2023.03.6 in /usr/local/lib/python3.10/dist-packages (from jsonschema>=2.6->nbformat==5.7.0->open3d==0.17.0) (2023.12.1)\n","Requirement already satisfied: referencing>=0.28.4 in /usr/local/lib/python3.10/dist-packages (from jsonschema>=2.6->nbformat==5.7.0->open3d==0.17.0) (0.35.1)\n","Requirement already satisfied: rpds-py>=0.7.1 in /usr/local/lib/python3.10/dist-packages (from jsonschema>=2.6->nbformat==5.7.0->open3d==0.17.0) (0.18.1)\n","Requirement already satisfied: tenacity>=6.2.0 in /usr/local/lib/python3.10/dist-packages (from plotly>=5.0.0->dash>=2.6.0->open3d==0.17.0) (8.4.2)\n","Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.10/dist-packages (from python-dateutil>=2.7->matplotlib>=3->open3d==0.17.0) (1.16.0)\n","Requirement already satisfied: zipp>=0.5 in /usr/local/lib/python3.10/dist-packages (from importlib-metadata->dash>=2.6.0->open3d==0.17.0) (3.19.2)\n","Requirement already satisfied: platformdirs>=2.5 in /usr/local/lib/python3.10/dist-packages (from jupyter-core->nbformat==5.7.0->open3d==0.17.0) (4.2.2)\n","Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests->dash>=2.6.0->open3d==0.17.0) (3.3.2)\n","Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests->dash>=2.6.0->open3d==0.17.0) (3.7)\n","Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests->dash>=2.6.0->open3d==0.17.0) (2.0.7)\n","Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests->dash>=2.6.0->open3d==0.17.0) (2024.6.2)\n","Requirement already satisfied: parso<0.9.0,>=0.8.3 in /usr/local/lib/python3.10/dist-packages (from jedi>=0.16->ipython>=6.1.0->ipywidgets>=8.0.4->open3d==0.17.0) (0.8.4)\n","Requirement already satisfied: ptyprocess>=0.5 in /usr/local/lib/python3.10/dist-packages (from 
pexpect>4.3->ipython>=6.1.0->ipywidgets>=8.0.4->open3d==0.17.0) (0.7.0)\n","Requirement already satisfied: wcwidth in /usr/local/lib/python3.10/dist-packages (from prompt-toolkit!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0->ipython>=6.1.0->ipywidgets>=8.0.4->open3d==0.17.0) (0.2.13)\n","Requirement already satisfied: tqdm==4.66.4 in /usr/local/lib/python3.10/dist-packages (4.66.4)\n","Requirement already satisfied: meshio==5.3.5 in /usr/local/lib/python3.10/dist-packages (5.3.5)\n","Requirement already satisfied: numpy>=1.20.0 in /usr/local/lib/python3.10/dist-packages (from meshio==5.3.5) (1.24.4)\n","Requirement already satisfied: rich in /usr/local/lib/python3.10/dist-packages (from meshio==5.3.5) (13.7.1)\n","Requirement already satisfied: markdown-it-py>=2.2.0 in /usr/local/lib/python3.10/dist-packages (from rich->meshio==5.3.5) (3.0.0)\n","Requirement already satisfied: pygments<3.0.0,>=2.13.0 in /usr/local/lib/python3.10/dist-packages (from rich->meshio==5.3.5) (2.16.1)\n","Requirement already satisfied: mdurl~=0.1 in /usr/local/lib/python3.10/dist-packages (from markdown-it-py>=2.2.0->rich->meshio==5.3.5) (0.1.2)\n","Requirement already satisfied: numpy in /usr/local/lib/python3.10/dist-packages (1.24.4)\n","Requirement already satisfied: pandas in /usr/local/lib/python3.10/dist-packages (2.2.2)\n","Requirement already satisfied: numpy>=1.22.4 in /usr/local/lib/python3.10/dist-packages (from pandas) (1.24.4)\n","Requirement already satisfied: python-dateutil>=2.8.2 in /usr/local/lib/python3.10/dist-packages (from pandas) (2.8.2)\n","Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.10/dist-packages (from pandas) (2023.4)\n","Requirement already satisfied: tzdata>=2022.7 in /usr/local/lib/python3.10/dist-packages (from pandas) (2024.1)\n","Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.10/dist-packages (from python-dateutil>=2.8.2->pandas) (1.16.0)\n","Requirement already satisfied: pyaml==24.4.0 in 
/usr/local/lib/python3.10/dist-packages (24.4.0)\n","Requirement already satisfied: PyYAML in /usr/local/lib/python3.10/dist-packages (from pyaml==24.4.0) (6.0.1)\n","Requirement already satisfied: scipy==1.8.0 in /usr/local/lib/python3.10/dist-packages (1.8.0)\n","Requirement already satisfied: scikit-learn==1.2.2 in /usr/local/lib/python3.10/dist-packages (1.2.2)\n","Requirement already satisfied: numpy<1.25.0,>=1.17.3 in /usr/local/lib/python3.10/dist-packages (from scipy==1.8.0) (1.24.4)\n","Requirement already satisfied: joblib>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from scikit-learn==1.2.2) (1.4.2)\n","Requirement already satisfied: threadpoolctl>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from scikit-learn==1.2.2) (3.5.0)\n","Collecting torch_cluster==1.6.3\n","  Using cached torch_cluster-1.6.3.tar.gz (54 kB)\n","  Preparing metadata (setup.py) ... \u001b[?25l\u001b[?25hdone\n","Requirement already satisfied: scipy in /usr/local/lib/python3.10/dist-packages (from torch_cluster==1.6.3) (1.8.0)\n","Requirement already satisfied: numpy<1.25.0,>=1.17.3 in /usr/local/lib/python3.10/dist-packages (from scipy->torch_cluster==1.6.3) (1.24.4)\n","Building wheels for collected packages: torch_cluster\n","  Building wheel for torch_cluster (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n","  Created wheel for torch_cluster: filename=torch_cluster-1.6.3-cp310-cp310-linux_x86_64.whl size=722828 sha256=99a10bb3bb43a044a630d607a0908b1758bfa2320acab9d723d1d05055f00f86\n","  Stored in directory: /root/.cache/pip/wheels/51/78/c3/536637b3cdcc3313aa5e8851a6c72b97f6a01877e68c7595e3\n","Successfully built torch_cluster\n","Installing collected packages: torch_cluster\n","Successfully installed torch_cluster-1.6.3\n","Requirement already satisfied: torch_geometric==2.5.3 in /usr/local/lib/python3.10/dist-packages (2.5.3)\n","Requirement already satisfied: tqdm in /usr/local/lib/python3.10/dist-packages (from torch_geometric==2.5.3) (4.66.4)\n","Requirement already satisfied: numpy in /usr/local/lib/python3.10/dist-packages (from torch_geometric==2.5.3) (1.24.4)\n","Requirement already satisfied: scipy in /usr/local/lib/python3.10/dist-packages (from torch_geometric==2.5.3) (1.8.0)\n","Requirement already satisfied: fsspec in /usr/local/lib/python3.10/dist-packages (from torch_geometric==2.5.3) (2023.6.0)\n","Requirement already satisfied: jinja2 in /usr/local/lib/python3.10/dist-packages (from torch_geometric==2.5.3) (3.1.4)\n","Requirement already satisfied: aiohttp in /usr/local/lib/python3.10/dist-packages (from torch_geometric==2.5.3) (3.9.5)\n","Requirement already satisfied: requests in /usr/local/lib/python3.10/dist-packages (from torch_geometric==2.5.3) (2.31.0)\n","Requirement already satisfied: pyparsing in /usr/local/lib/python3.10/dist-packages (from torch_geometric==2.5.3) (3.1.2)\n","Requirement already satisfied: scikit-learn in /usr/local/lib/python3.10/dist-packages (from torch_geometric==2.5.3) (1.2.2)\n","Requirement already satisfied: psutil>=5.8.0 in /usr/local/lib/python3.10/dist-packages (from torch_geometric==2.5.3) (5.9.5)\n","Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.10/dist-packages (from aiohttp->torch_geometric==2.5.3) (1.3.1)\n","Requirement already satisfied: 
attrs>=17.3.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->torch_geometric==2.5.3) (23.2.0)\n","Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from aiohttp->torch_geometric==2.5.3) (1.4.1)\n","Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.10/dist-packages (from aiohttp->torch_geometric==2.5.3) (6.0.5)\n","Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->torch_geometric==2.5.3) (1.9.4)\n","Requirement already satisfied: async-timeout<5.0,>=4.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->torch_geometric==2.5.3) (4.0.3)\n","Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.10/dist-packages (from jinja2->torch_geometric==2.5.3) (2.1.5)\n","Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests->torch_geometric==2.5.3) (3.3.2)\n","Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests->torch_geometric==2.5.3) (3.7)\n","Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests->torch_geometric==2.5.3) (2.0.7)\n","Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests->torch_geometric==2.5.3) (2024.6.2)\n","Requirement already satisfied: joblib>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from scikit-learn->torch_geometric==2.5.3) (1.4.2)\n","Requirement already satisfied: threadpoolctl>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from scikit-learn->torch_geometric==2.5.3) (3.5.0)\n","Collecting timm==1.0.3\n","  Downloading timm-1.0.3-py3-none-any.whl (2.3 MB)\n","\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m2.3/2.3 MB\u001b[0m \u001b[31m20.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: torch 
in /usr/local/lib/python3.10/dist-packages (from timm==1.0.3) (2.3.0+cu121)\n","Requirement already satisfied: torchvision in /usr/local/lib/python3.10/dist-packages (from timm==1.0.3) (0.18.0+cu121)\n","Requirement already satisfied: pyyaml in /usr/local/lib/python3.10/dist-packages (from timm==1.0.3) (6.0.1)\n","Requirement already satisfied: huggingface_hub in /usr/local/lib/python3.10/dist-packages (from timm==1.0.3) (0.23.4)\n","Requirement already satisfied: safetensors in /usr/local/lib/python3.10/dist-packages (from timm==1.0.3) (0.4.3)\n","Requirement already satisfied: filelock in /usr/local/lib/python3.10/dist-packages (from huggingface_hub->timm==1.0.3) (3.15.4)\n","Requirement already satisfied: fsspec>=2023.5.0 in /usr/local/lib/python3.10/dist-packages (from huggingface_hub->timm==1.0.3) (2023.6.0)\n","Requirement already satisfied: packaging>=20.9 in /usr/local/lib/python3.10/dist-packages (from huggingface_hub->timm==1.0.3) (24.1)\n","Requirement already satisfied: requests in /usr/local/lib/python3.10/dist-packages (from huggingface_hub->timm==1.0.3) (2.31.0)\n","Requirement already satisfied: tqdm>=4.42.1 in /usr/local/lib/python3.10/dist-packages (from huggingface_hub->timm==1.0.3) (4.66.4)\n","Requirement already satisfied: typing-extensions>=3.7.4.3 in /usr/local/lib/python3.10/dist-packages (from huggingface_hub->timm==1.0.3) (4.12.2)\n","Requirement already satisfied: sympy in /usr/local/lib/python3.10/dist-packages (from torch->timm==1.0.3) (1.12.1)\n","Requirement already satisfied: networkx in /usr/local/lib/python3.10/dist-packages (from torch->timm==1.0.3) (3.3)\n","Requirement already satisfied: jinja2 in /usr/local/lib/python3.10/dist-packages (from torch->timm==1.0.3) (3.1.4)\n","Collecting nvidia-cuda-nvrtc-cu12==12.1.105 (from torch->timm==1.0.3)\n","  Using cached nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (23.7 MB)\n","Collecting nvidia-cuda-runtime-cu12==12.1.105 (from torch->timm==1.0.3)\n","  Using cached 
nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (823 kB)\n","Collecting nvidia-cuda-cupti-cu12==12.1.105 (from torch->timm==1.0.3)\n","  Using cached nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (14.1 MB)\n","Collecting nvidia-cudnn-cu12==8.9.2.26 (from torch->timm==1.0.3)\n","  Using cached nvidia_cudnn_cu12-8.9.2.26-py3-none-manylinux1_x86_64.whl (731.7 MB)\n","Collecting nvidia-cublas-cu12==12.1.3.1 (from torch->timm==1.0.3)\n","  Using cached nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl (410.6 MB)\n","Collecting nvidia-cufft-cu12==11.0.2.54 (from torch->timm==1.0.3)\n","  Using cached nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl (121.6 MB)\n","Collecting nvidia-curand-cu12==10.3.2.106 (from torch->timm==1.0.3)\n","  Using cached nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl (56.5 MB)\n","Collecting nvidia-cusolver-cu12==11.4.5.107 (from torch->timm==1.0.3)\n","  Using cached nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl (124.2 MB)\n","Collecting nvidia-cusparse-cu12==12.1.0.106 (from torch->timm==1.0.3)\n","  Using cached nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl (196.0 MB)\n","Collecting nvidia-nccl-cu12==2.20.5 (from torch->timm==1.0.3)\n","  Using cached nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_x86_64.whl (176.2 MB)\n","Collecting nvidia-nvtx-cu12==12.1.105 (from torch->timm==1.0.3)\n","  Using cached nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (99 kB)\n","Requirement already satisfied: triton==2.3.0 in /usr/local/lib/python3.10/dist-packages (from torch->timm==1.0.3) (2.3.0)\n","Collecting nvidia-nvjitlink-cu12 (from nvidia-cusolver-cu12==11.4.5.107->torch->timm==1.0.3)\n","  Downloading nvidia_nvjitlink_cu12-12.5.82-py3-none-manylinux2014_x86_64.whl (21.3 MB)\n","\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m21.3/21.3 MB\u001b[0m \u001b[31m52.3 MB/s\u001b[0m eta 
\u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: numpy in /usr/local/lib/python3.10/dist-packages (from torchvision->timm==1.0.3) (1.24.4)\n","Requirement already satisfied: pillow!=8.3.*,>=5.3.0 in /usr/local/lib/python3.10/dist-packages (from torchvision->timm==1.0.3) (9.4.0)\n","Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.10/dist-packages (from jinja2->torch->timm==1.0.3) (2.1.5)\n","Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests->huggingface_hub->timm==1.0.3) (3.3.2)\n","Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests->huggingface_hub->timm==1.0.3) (3.7)\n","Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests->huggingface_hub->timm==1.0.3) (2.0.7)\n","Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests->huggingface_hub->timm==1.0.3) (2024.6.2)\n","Requirement already satisfied: mpmath<1.4.0,>=1.1.0 in /usr/local/lib/python3.10/dist-packages (from sympy->torch->timm==1.0.3) (1.3.0)\n","Installing collected packages: nvidia-nvtx-cu12, nvidia-nvjitlink-cu12, nvidia-nccl-cu12, nvidia-curand-cu12, nvidia-cufft-cu12, nvidia-cuda-runtime-cu12, nvidia-cuda-nvrtc-cu12, nvidia-cuda-cupti-cu12, nvidia-cublas-cu12, nvidia-cusparse-cu12, nvidia-cudnn-cu12, nvidia-cusolver-cu12, timm\n","Successfully installed nvidia-cublas-cu12-12.1.3.1 nvidia-cuda-cupti-cu12-12.1.105 nvidia-cuda-nvrtc-cu12-12.1.105 nvidia-cuda-runtime-cu12-12.1.105 nvidia-cudnn-cu12-8.9.2.26 nvidia-cufft-cu12-11.0.2.54 nvidia-curand-cu12-10.3.2.106 nvidia-cusolver-cu12-11.4.5.107 nvidia-cusparse-cu12-12.1.0.106 nvidia-nccl-cu12-2.20.5 nvidia-nvjitlink-cu12-12.5.82 nvidia-nvtx-cu12-12.1.105 timm-1.0.3\n","Collecting einops\n","  Downloading einops-0.8.0-py3-none-any.whl (43 kB)\n","\u001b[2K     
\u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m43.2/43.2 kB\u001b[0m \u001b[31m1.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hInstalling collected packages: einops\n","Successfully installed einops-0.8.0\n"]}],"source":["! pip install torch_geometric==2.5.3\n","! pip install vtk==9.3.0\n","! pip install open3d==0.17.0\n","! pip install tqdm==4.66.4\n","! pip install meshio==5.3.5\n","! pip install numpy\n","! pip install pandas\n","! pip install pyaml==24.4.0\n","! pip install scipy==1.8.0 scikit-learn==1.2.2\n","! pip install torch_cluster==1.6.3\n","! pip install torch_geometric==2.5.3\n","! pip install timm==1.0.3\n","! pip install einops\n","! pip install torch==2.3.0"]},{"cell_type":"markdown","metadata":{"id":"bQReQk5H3eiL"},"source":[]},{"cell_type":"markdown","metadata":{"id":"DUxmPjWWV1sr"},"source":["# 额外数据导入\n","（此处导入权重文件和额外数据集，在此之外的导入将有被判违规的风险，这里以导入随机生成的Track C的A榜样例提交的zip为例子）"]},{"cell_type":"code","execution_count":null,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"collapsed":true,"executionInfo":{"elapsed":2981,"status":"ok","timestamp":1720671747066,"user":{"displayName":"pei jian zeng","userId":"06013928868849686113"},"user_tz":-480},"id":"H8YjMlYcYmei","outputId":"dad65393-7943-4872-8774-5a73ad10ddc2"},"outputs":[{"name":"stdout","output_type":"stream","text":["Downloading...\n","From (original): https://drive.google.com/uc?export=download&id=1UHs_jLf4ir0bct5CDHNJuIIV7NijyWWg\n","From (redirected): https://drive.google.com/uc?export=download&id=1UHs_jLf4ir0bct5CDHNJuIIV7NijyWWg&confirm=t&uuid=ff8e2cc6-b26c-4d70-a2f9-efa59a8a8a2d\n","To: /content/model_200.pth\n","100% 78.0M/78.0M [00:01<00:00, 55.0MB/s]\n"]}],"source":["!gdown 'https://drive.google.com/uc?export=download&id=1UHs_jLf4ir0bct5CDHNJuIIV7NijyWWg' -O 
model_200.pth"]},{"cell_type":"markdown","metadata":{"id":"SkhR5W3k25ye"},"source":["A数据集的处理"]},{"cell_type":"code","execution_count":null,"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":1000},"executionInfo":{"elapsed":24106,"status":"ok","timestamp":1720670853997,"user":{"displayName":"pei jian zeng","userId":"06013928868849686113"},"user_tz":-480},"id":"Zfcg1SNQ-Pr-","outputId":"1755d1d6-920f-461f-f6bf-436813d6acca"},"outputs":[{"name":"stdout","output_type":"stream","text":["Processed mesh_350.ply to data_vtk/mesh_350.vtk\n","Processed mesh_554.ply to data_vtk/mesh_554.vtk\n","Processed mesh_532.ply to data_vtk/mesh_532.vtk\n","Processed mesh_156.ply to data_vtk/mesh_156.vtk\n","Processed mesh_121.ply to data_vtk/mesh_121.vtk\n","Processed mesh_499.ply to data_vtk/mesh_499.vtk\n","Processed mesh_344.ply to data_vtk/mesh_344.vtk\n","Processed mesh_205.ply to data_vtk/mesh_205.vtk\n","Processed mesh_145.ply to data_vtk/mesh_145.vtk\n","Processed mesh_151.ply to data_vtk/mesh_151.vtk\n","Processed mesh_420.ply to data_vtk/mesh_420.vtk\n","Processed mesh_519.ply to data_vtk/mesh_519.vtk\n","Processed mesh_440.ply to data_vtk/mesh_440.vtk\n","Processed mesh_001.ply to data_vtk/mesh_001.vtk\n","Processed mesh_633.ply to data_vtk/mesh_633.vtk\n","Processed mesh_398.ply to data_vtk/mesh_398.vtk\n","Processed mesh_141.ply to data_vtk/mesh_141.vtk\n","Processed mesh_413.ply to data_vtk/mesh_413.vtk\n","Processed mesh_004.ply to data_vtk/mesh_004.vtk\n","Processed mesh_309.ply to data_vtk/mesh_309.vtk\n","Processed mesh_501.ply to data_vtk/mesh_501.vtk\n","Processed mesh_459.ply to data_vtk/mesh_459.vtk\n","Processed mesh_638.ply to data_vtk/mesh_638.vtk\n","Processed mesh_366.ply to data_vtk/mesh_366.vtk\n","Processed mesh_055.ply to data_vtk/mesh_055.vtk\n","Processed mesh_114.ply to data_vtk/mesh_114.vtk\n","Processed mesh_340.ply to data_vtk/mesh_340.vtk\n","Processed mesh_547.ply to data_vtk/mesh_547.vtk\n","Processed mesh_564.ply to 
data_vtk/mesh_564.vtk\n","Processed mesh_005.ply to data_vtk/mesh_005.vtk\n","Processed mesh_272.ply to data_vtk/mesh_272.vtk\n","Processed mesh_184.ply to data_vtk/mesh_184.vtk\n","Processed mesh_515.ply to data_vtk/mesh_515.vtk\n","Processed mesh_418.ply to data_vtk/mesh_418.vtk\n","Processed mesh_072.ply to data_vtk/mesh_072.vtk\n","Processed mesh_632.ply to data_vtk/mesh_632.vtk\n","Processed mesh_628.ply to data_vtk/mesh_628.vtk\n","Processed mesh_261.ply to data_vtk/mesh_261.vtk\n","Processed mesh_314.ply to data_vtk/mesh_314.vtk\n","Processed mesh_313.ply to data_vtk/mesh_313.vtk\n","Processed mesh_043.ply to data_vtk/mesh_043.vtk\n","Processed mesh_048.ply to data_vtk/mesh_048.vtk\n","Processed mesh_608.ply to data_vtk/mesh_608.vtk\n","Processed mesh_047.ply to data_vtk/mesh_047.vtk\n","Processed mesh_120.ply to data_vtk/mesh_120.vtk\n","Processed mesh_379.ply to data_vtk/mesh_379.vtk\n","Processed mesh_358.ply to data_vtk/mesh_358.vtk\n","Processed mesh_007.ply to data_vtk/mesh_007.vtk\n","Processed mesh_134.ply to data_vtk/mesh_134.vtk\n","Processed mesh_503.ply to data_vtk/mesh_503.vtk\n","Processed mesh_247.ply to data_vtk/mesh_247.vtk\n","Processed mesh_018.ply to data_vtk/mesh_018.vtk\n","Processed mesh_062.ply to data_vtk/mesh_062.vtk\n","Processed mesh_463.ply to data_vtk/mesh_463.vtk\n","Processed mesh_088.ply to data_vtk/mesh_088.vtk\n","Processed mesh_376.ply to data_vtk/mesh_376.vtk\n","Processed mesh_311.ply to data_vtk/mesh_311.vtk\n","Processed mesh_579.ply to data_vtk/mesh_579.vtk\n","Processed mesh_508.ply to data_vtk/mesh_508.vtk\n","Processed mesh_181.ply to data_vtk/mesh_181.vtk\n","Processed mesh_095.ply to data_vtk/mesh_095.vtk\n","Processed mesh_220.ply to data_vtk/mesh_220.vtk\n","Processed mesh_271.ply to data_vtk/mesh_271.vtk\n","Processed mesh_225.ply to data_vtk/mesh_225.vtk\n","Processed mesh_373.ply to data_vtk/mesh_373.vtk\n","Processed mesh_161.ply to data_vtk/mesh_161.vtk\n","Processed mesh_582.ply to 
data_vtk/mesh_582.vtk\n","Processed mesh_419.ply to data_vtk/mesh_419.vtk\n","Processed mesh_380.ply to data_vtk/mesh_380.vtk\n","Processed mesh_274.ply to data_vtk/mesh_274.vtk\n","Processed mesh_107.ply to data_vtk/mesh_107.vtk\n","Processed mesh_235.ply to data_vtk/mesh_235.vtk\n","Processed mesh_035.ply to data_vtk/mesh_035.vtk\n","Processed mesh_652.ply to data_vtk/mesh_652.vtk\n","Processed mesh_455.ply to data_vtk/mesh_455.vtk\n","Processed mesh_182.ply to data_vtk/mesh_182.vtk\n","Processed mesh_023.ply to data_vtk/mesh_023.vtk\n","Processed mesh_094.ply to data_vtk/mesh_094.vtk\n","Processed mesh_207.ply to data_vtk/mesh_207.vtk\n","Processed mesh_613.ply to data_vtk/mesh_613.vtk\n","Processed mesh_199.ply to data_vtk/mesh_199.vtk\n","Processed mesh_305.ply to data_vtk/mesh_305.vtk\n","Processed mesh_139.ply to data_vtk/mesh_139.vtk\n","Processed mesh_584.ply to data_vtk/mesh_584.vtk\n","Processed mesh_080.ply to data_vtk/mesh_080.vtk\n","Processed mesh_058.ply to data_vtk/mesh_058.vtk\n","Processed mesh_067.ply to data_vtk/mesh_067.vtk\n","Processed mesh_465.ply to data_vtk/mesh_465.vtk\n","Processed mesh_514.ply to data_vtk/mesh_514.vtk\n","Processed mesh_222.ply to data_vtk/mesh_222.vtk\n","Processed mesh_143.ply to data_vtk/mesh_143.vtk\n","Processed mesh_525.ply to data_vtk/mesh_525.vtk\n","Processed mesh_604.ply to data_vtk/mesh_604.vtk\n","Processed mesh_253.ply to data_vtk/mesh_253.vtk\n","Processed mesh_140.ply to data_vtk/mesh_140.vtk\n","Processed mesh_621.ply to data_vtk/mesh_621.vtk\n","Processed mesh_293.ply to data_vtk/mesh_293.vtk\n","Processed mesh_516.ply to data_vtk/mesh_516.vtk\n","Processed mesh_176.ply to data_vtk/mesh_176.vtk\n","Processed mesh_626.ply to data_vtk/mesh_626.vtk\n","Processed mesh_162.ply to data_vtk/mesh_162.vtk\n","Processed mesh_381.ply to data_vtk/mesh_381.vtk\n","Processed mesh_236.ply to data_vtk/mesh_236.vtk\n","Processed mesh_476.ply to data_vtk/mesh_476.vtk\n","Processed mesh_320.ply to 
data_vtk/mesh_320.vtk\n","Processed mesh_360.ply to data_vtk/mesh_360.vtk\n","Processed mesh_106.ply to data_vtk/mesh_106.vtk\n","Processed mesh_119.ply to data_vtk/mesh_119.vtk\n","Processed mesh_473.ply to data_vtk/mesh_473.vtk\n","Processed mesh_555.ply to data_vtk/mesh_555.vtk\n","Processed mesh_329.ply to data_vtk/mesh_329.vtk\n","Processed mesh_217.ply to data_vtk/mesh_217.vtk\n","Processed mesh_530.ply to data_vtk/mesh_530.vtk\n","Processed mesh_393.ply to data_vtk/mesh_393.vtk\n","Processed mesh_511.ply to data_vtk/mesh_511.vtk\n","Processed mesh_046.ply to data_vtk/mesh_046.vtk\n","Processed mesh_460.ply to data_vtk/mesh_460.vtk\n","Processed mesh_456.ply to data_vtk/mesh_456.vtk\n","Processed mesh_237.ply to data_vtk/mesh_237.vtk\n","Processed mesh_542.ply to data_vtk/mesh_542.vtk\n","Processed mesh_645.ply to data_vtk/mesh_645.vtk\n","Processed mesh_567.ply to data_vtk/mesh_567.vtk\n","Processed mesh_172.ply to data_vtk/mesh_172.vtk\n","Processed mesh_551.ply to data_vtk/mesh_551.vtk\n","Processed mesh_190.ply to data_vtk/mesh_190.vtk\n","Processed mesh_364.ply to data_vtk/mesh_364.vtk\n","Processed mesh_384.ply to data_vtk/mesh_384.vtk\n","Processed mesh_097.ply to data_vtk/mesh_097.vtk\n","Processed mesh_451.ply to data_vtk/mesh_451.vtk\n","Processed mesh_233.ply to data_vtk/mesh_233.vtk\n","Processed mesh_262.ply to data_vtk/mesh_262.vtk\n","Processed mesh_414.ply to data_vtk/mesh_414.vtk\n","Processed mesh_148.ply to data_vtk/mesh_148.vtk\n","Processed mesh_193.ply to data_vtk/mesh_193.vtk\n","Processed mesh_201.ply to data_vtk/mesh_201.vtk\n","Processed mesh_634.ply to data_vtk/mesh_634.vtk\n","Processed mesh_498.ply to data_vtk/mesh_498.vtk\n","Processed mesh_452.ply to data_vtk/mesh_452.vtk\n","Processed mesh_325.ply to data_vtk/mesh_325.vtk\n","Processed mesh_075.ply to data_vtk/mesh_075.vtk\n","Processed mesh_264.ply to data_vtk/mesh_264.vtk\n","Processed mesh_524.ply to data_vtk/mesh_524.vtk\n","Processed mesh_087.ply to 
data_vtk/mesh_087.vtk\n","Processed mesh_112.ply to data_vtk/mesh_112.vtk\n","Processed mesh_017.ply to data_vtk/mesh_017.vtk\n","Processed mesh_219.ply to data_vtk/mesh_219.vtk\n","Processed mesh_224.ply to data_vtk/mesh_224.vtk\n","Processed mesh_328.ply to data_vtk/mesh_328.vtk\n","Processed mesh_543.ply to data_vtk/mesh_543.vtk\n","Processed mesh_594.ply to data_vtk/mesh_594.vtk\n","Processed mesh_641.ply to data_vtk/mesh_641.vtk\n","Processed mesh_117.ply to data_vtk/mesh_117.vtk\n","Processed mesh_111.ply to data_vtk/mesh_111.vtk\n","Processed mesh_448.ply to data_vtk/mesh_448.vtk\n","Processed mesh_486.ply to data_vtk/mesh_486.vtk\n","Processed mesh_302.ply to data_vtk/mesh_302.vtk\n","Processed mesh_249.ply to data_vtk/mesh_249.vtk\n","Processed mesh_163.ply to data_vtk/mesh_163.vtk\n","Processed mesh_479.ply to data_vtk/mesh_479.vtk\n","Processed mesh_091.ply to data_vtk/mesh_091.vtk\n","Processed mesh_642.ply to data_vtk/mesh_642.vtk\n","Processed mesh_296.ply to data_vtk/mesh_296.vtk\n","Processed mesh_483.ply to data_vtk/mesh_483.vtk\n","Processed mesh_289.ply to data_vtk/mesh_289.vtk\n","Processed mesh_228.ply to data_vtk/mesh_228.vtk\n","Processed mesh_587.ply to data_vtk/mesh_587.vtk\n","Processed mesh_403.ply to data_vtk/mesh_403.vtk\n","Processed mesh_401.ply to data_vtk/mesh_401.vtk\n","Processed mesh_025.ply to data_vtk/mesh_025.vtk\n","Processed mesh_482.ply to data_vtk/mesh_482.vtk\n","Processed mesh_231.ply to data_vtk/mesh_231.vtk\n","Processed mesh_052.ply to data_vtk/mesh_052.vtk\n","Processed mesh_251.ply to data_vtk/mesh_251.vtk\n","Processed mesh_636.ply to data_vtk/mesh_636.vtk\n","Processed mesh_439.ply to data_vtk/mesh_439.vtk\n","Processed mesh_369.ply to data_vtk/mesh_369.vtk\n","Processed mesh_210.ply to data_vtk/mesh_210.vtk\n","Processed mesh_266.ply to data_vtk/mesh_266.vtk\n","Processed mesh_101.ply to data_vtk/mesh_101.vtk\n","Processed mesh_200.ply to data_vtk/mesh_200.vtk\n","Processed mesh_277.ply to 
data_vtk/mesh_277.vtk\n","Processed mesh_504.ply to data_vtk/mesh_504.vtk\n","Processed mesh_049.ply to data_vtk/mesh_049.vtk\n","Processed mesh_595.ply to data_vtk/mesh_595.vtk\n","Processed mesh_310.ply to data_vtk/mesh_310.vtk\n","Processed mesh_654.ply to data_vtk/mesh_654.vtk\n","Processed mesh_173.ply to data_vtk/mesh_173.vtk\n","Processed mesh_657.ply to data_vtk/mesh_657.vtk\n","Processed mesh_243.ply to data_vtk/mesh_243.vtk\n","Processed mesh_133.ply to data_vtk/mesh_133.vtk\n","Processed mesh_295.ply to data_vtk/mesh_295.vtk\n","Processed mesh_212.ply to data_vtk/mesh_212.vtk\n","Processed mesh_462.ply to data_vtk/mesh_462.vtk\n","Processed mesh_027.ply to data_vtk/mesh_027.vtk\n","Processed mesh_540.ply to data_vtk/mesh_540.vtk\n","Processed mesh_032.ply to data_vtk/mesh_032.vtk\n","Processed mesh_494.ply to data_vtk/mesh_494.vtk\n","Processed mesh_548.ply to data_vtk/mesh_548.vtk\n","Processed mesh_355.ply to data_vtk/mesh_355.vtk\n","Processed mesh_425.ply to data_vtk/mesh_425.vtk\n","Processed mesh_354.ply to data_vtk/mesh_354.vtk\n","Processed mesh_153.ply to data_vtk/mesh_153.vtk\n","Processed mesh_593.ply to data_vtk/mesh_593.vtk\n","Processed mesh_454.ply to data_vtk/mesh_454.vtk\n","Processed mesh_611.ply to data_vtk/mesh_611.vtk\n","Processed mesh_589.ply to data_vtk/mesh_589.vtk\n","Processed mesh_285.ply to data_vtk/mesh_285.vtk\n","Processed mesh_306.ply to data_vtk/mesh_306.vtk\n","Processed mesh_518.ply to data_vtk/mesh_518.vtk\n","Processed mesh_092.ply to data_vtk/mesh_092.vtk\n","Processed mesh_322.ply to data_vtk/mesh_322.vtk\n","Processed mesh_447.ply to data_vtk/mesh_447.vtk\n","Processed mesh_258.ply to data_vtk/mesh_258.vtk\n","Processed mesh_631.ply to data_vtk/mesh_631.vtk\n","Processed mesh_569.ply to data_vtk/mesh_569.vtk\n","Processed mesh_545.ply to data_vtk/mesh_545.vtk\n","Processed mesh_602.ply to data_vtk/mesh_602.vtk\n","Processed mesh_356.ply to data_vtk/mesh_356.vtk\n","Processed mesh_021.ply to 
data_vtk/mesh_021.vtk\n","Processed mesh_581.ply to data_vtk/mesh_581.vtk\n","Processed mesh_443.ply to data_vtk/mesh_443.vtk\n","Processed mesh_118.ply to data_vtk/mesh_118.vtk\n","Processed mesh_273.ply to data_vtk/mesh_273.vtk\n","Processed mesh_178.ply to data_vtk/mesh_178.vtk\n","Processed mesh_081.ply to data_vtk/mesh_081.vtk\n","Processed mesh_648.ply to data_vtk/mesh_648.vtk\n","Processed mesh_629.ply to data_vtk/mesh_629.vtk\n","Processed mesh_292.ply to data_vtk/mesh_292.vtk\n","Processed mesh_433.ply to data_vtk/mesh_433.vtk\n","Processed mesh_324.ply to data_vtk/mesh_324.vtk\n","Processed mesh_478.ply to data_vtk/mesh_478.vtk\n","Processed mesh_474.ply to data_vtk/mesh_474.vtk\n","Processed mesh_102.ply to data_vtk/mesh_102.vtk\n","Processed mesh_147.ply to data_vtk/mesh_147.vtk\n","Processed mesh_215.ply to data_vtk/mesh_215.vtk\n","Processed mesh_505.ply to data_vtk/mesh_505.vtk\n","Processed mesh_533.ply to data_vtk/mesh_533.vtk\n","Processed mesh_223.ply to data_vtk/mesh_223.vtk\n","Processed mesh_086.ply to data_vtk/mesh_086.vtk\n","Processed mesh_158.ply to data_vtk/mesh_158.vtk\n","Processed mesh_507.ply to data_vtk/mesh_507.vtk\n","Processed mesh_304.ply to data_vtk/mesh_304.vtk\n","Processed mesh_281.ply to data_vtk/mesh_281.vtk\n","Processed mesh_490.ply to data_vtk/mesh_490.vtk\n","Processed mesh_074.ply to data_vtk/mesh_074.vtk\n","Processed mesh_422.ply to data_vtk/mesh_422.vtk\n","Processed mesh_410.ply to data_vtk/mesh_410.vtk\n","Processed mesh_349.ply to data_vtk/mesh_349.vtk\n","Processed mesh_128.ply to data_vtk/mesh_128.vtk\n","Processed mesh_610.ply to data_vtk/mesh_610.vtk\n","Processed mesh_444.ply to data_vtk/mesh_444.vtk\n","Processed mesh_627.ply to data_vtk/mesh_627.vtk\n","Processed mesh_341.ply to data_vtk/mesh_341.vtk\n","Processed mesh_255.ply to data_vtk/mesh_255.vtk\n","Processed mesh_039.ply to data_vtk/mesh_039.vtk\n","Processed mesh_100.ply to data_vtk/mesh_100.vtk\n","Processed mesh_367.ply to 
data_vtk/mesh_367.vtk\n","Processed mesh_300.ply to data_vtk/mesh_300.vtk\n","Processed mesh_597.ply to data_vtk/mesh_597.vtk\n","Processed mesh_527.ply to data_vtk/mesh_527.vtk\n","Processed mesh_170.ply to data_vtk/mesh_170.vtk\n","Processed mesh_263.ply to data_vtk/mesh_263.vtk\n","Processed mesh_211.ply to data_vtk/mesh_211.vtk\n","Processed mesh_022.ply to data_vtk/mesh_022.vtk\n","Processed mesh_496.ply to data_vtk/mesh_496.vtk\n","Processed mesh_576.ply to data_vtk/mesh_576.vtk\n","Processed mesh_166.ply to data_vtk/mesh_166.vtk\n","Processed mesh_385.ply to data_vtk/mesh_385.vtk\n","Processed mesh_630.ply to data_vtk/mesh_630.vtk\n","Processed mesh_348.ply to data_vtk/mesh_348.vtk\n","Processed mesh_012.ply to data_vtk/mesh_012.vtk\n","Processed mesh_392.ply to data_vtk/mesh_392.vtk\n","Processed mesh_286.ply to data_vtk/mesh_286.vtk\n","Processed mesh_572.ply to data_vtk/mesh_572.vtk\n","Processed mesh_280.ply to data_vtk/mesh_280.vtk\n","Processed mesh_389.ply to data_vtk/mesh_389.vtk\n","Processed mesh_290.ply to data_vtk/mesh_290.vtk\n","Processed mesh_026.ply to data_vtk/mesh_026.vtk\n","Processed mesh_640.ply to data_vtk/mesh_640.vtk\n","Processed mesh_509.ply to data_vtk/mesh_509.vtk\n","Processed mesh_213.ply to data_vtk/mesh_213.vtk\n","Processed mesh_157.ply to data_vtk/mesh_157.vtk\n","Processed mesh_165.ply to data_vtk/mesh_165.vtk\n","Processed mesh_405.ply to data_vtk/mesh_405.vtk\n","Processed mesh_466.ply to data_vtk/mesh_466.vtk\n","Processed mesh_588.ply to data_vtk/mesh_588.vtk\n","Processed mesh_105.ply to data_vtk/mesh_105.vtk\n","Processed mesh_034.ply to data_vtk/mesh_034.vtk\n","Processed mesh_160.ply to data_vtk/mesh_160.vtk\n","Processed mesh_339.ply to data_vtk/mesh_339.vtk\n","Processed mesh_179.ply to data_vtk/mesh_179.vtk\n","Processed mesh_291.ply to data_vtk/mesh_291.vtk\n","Processed mesh_234.ply to data_vtk/mesh_234.vtk\n","Processed mesh_252.ply to data_vtk/mesh_252.vtk\n","Processed mesh_085.ply to 
data_vtk/mesh_085.vtk\n","Processed mesh_566.ply to data_vtk/mesh_566.vtk\n","Processed mesh_536.ply to data_vtk/mesh_536.vtk\n","Processed mesh_159.ply to data_vtk/mesh_159.vtk\n","Processed mesh_616.ply to data_vtk/mesh_616.vtk\n","Processed mesh_214.ply to data_vtk/mesh_214.vtk\n","Processed mesh_612.ply to data_vtk/mesh_612.vtk\n","Processed mesh_357.ply to data_vtk/mesh_357.vtk\n","Processed mesh_345.ply to data_vtk/mesh_345.vtk\n","Processed mesh_246.ply to data_vtk/mesh_246.vtk\n","Processed mesh_655.ply to data_vtk/mesh_655.vtk\n","Processed mesh_131.ply to data_vtk/mesh_131.vtk\n","Processed mesh_110.ply to data_vtk/mesh_110.vtk\n","Processed mesh_412.ply to data_vtk/mesh_412.vtk\n","Processed mesh_502.ply to data_vtk/mesh_502.vtk\n","Processed mesh_245.ply to data_vtk/mesh_245.vtk\n","Processed mesh_078.ply to data_vtk/mesh_078.vtk\n","Processed mesh_282.ply to data_vtk/mesh_282.vtk\n","Processed mesh_013.ply to data_vtk/mesh_013.vtk\n","Processed mesh_347.ply to data_vtk/mesh_347.vtk\n","Processed mesh_615.ply to data_vtk/mesh_615.vtk\n","Processed mesh_467.ply to data_vtk/mesh_467.vtk\n","Processed mesh_137.ply to data_vtk/mesh_137.vtk\n","Processed mesh_071.ply to data_vtk/mesh_071.vtk\n","Processed mesh_115.ply to data_vtk/mesh_115.vtk\n","Processed mesh_430.ply to data_vtk/mesh_430.vtk\n","Processed mesh_573.ply to data_vtk/mesh_573.vtk\n","Processed mesh_618.ply to data_vtk/mesh_618.vtk\n","Processed mesh_064.ply to data_vtk/mesh_064.vtk\n","Processed mesh_319.ply to data_vtk/mesh_319.vtk\n","Processed mesh_583.ply to data_vtk/mesh_583.vtk\n","Processed mesh_362.ply to data_vtk/mesh_362.vtk\n","Processed mesh_521.ply to data_vtk/mesh_521.vtk\n","Processed mesh_129.ply to data_vtk/mesh_129.vtk\n","Processed mesh_192.ply to data_vtk/mesh_192.vtk\n","Processed mesh_371.ply to data_vtk/mesh_371.vtk\n","Processed mesh_337.ply to data_vtk/mesh_337.vtk\n","Processed mesh_109.ply to data_vtk/mesh_109.vtk\n","Processed mesh_008.ply to 
data_vtk/mesh_008.vtk\n","Processed mesh_435.ply to data_vtk/mesh_435.vtk\n","Processed mesh_079.ply to data_vtk/mesh_079.vtk\n","Processed mesh_656.ply to data_vtk/mesh_656.vtk\n","Processed mesh_040.ply to data_vtk/mesh_040.vtk\n","Processed mesh_301.ply to data_vtk/mesh_301.vtk\n","Processed mesh_308.ply to data_vtk/mesh_308.vtk\n","Processed mesh_407.ply to data_vtk/mesh_407.vtk\n","Processed mesh_155.ply to data_vtk/mesh_155.vtk\n","Processed mesh_123.ply to data_vtk/mesh_123.vtk\n","Processed mesh_647.ply to data_vtk/mesh_647.vtk\n","Processed mesh_267.ply to data_vtk/mesh_267.vtk\n","Processed mesh_229.ply to data_vtk/mesh_229.vtk\n","Processed mesh_321.ply to data_vtk/mesh_321.vtk\n","Processed mesh_031.ply to data_vtk/mesh_031.vtk\n","Processed mesh_374.ply to data_vtk/mesh_374.vtk\n","Processed mesh_596.ply to data_vtk/mesh_596.vtk\n","Processed mesh_294.ply to data_vtk/mesh_294.vtk\n","Processed mesh_475.ply to data_vtk/mesh_475.vtk\n","Processed mesh_127.ply to data_vtk/mesh_127.vtk\n","Processed mesh_399.ply to data_vtk/mesh_399.vtk\n","Processed mesh_470.ply to data_vtk/mesh_470.vtk\n","Processed mesh_620.ply to data_vtk/mesh_620.vtk\n","Processed mesh_061.ply to data_vtk/mesh_061.vtk\n","Processed mesh_334.ply to data_vtk/mesh_334.vtk\n","Processed mesh_378.ply to data_vtk/mesh_378.vtk\n","Processed mesh_279.ply to data_vtk/mesh_279.vtk\n","Processed mesh_472.ply to data_vtk/mesh_472.vtk\n","Processed mesh_560.ply to data_vtk/mesh_560.vtk\n","Processed mesh_150.ply to data_vtk/mesh_150.vtk\n","Processed mesh_084.ply to data_vtk/mesh_084.vtk\n","Processed mesh_353.ply to data_vtk/mesh_353.vtk\n","Processed mesh_436.ply to data_vtk/mesh_436.vtk\n","Processed mesh_427.ply to data_vtk/mesh_427.vtk\n","Processed mesh_365.ply to data_vtk/mesh_365.vtk\n","Processed mesh_299.ply to data_vtk/mesh_299.vtk\n","Processed mesh_198.ply to data_vtk/mesh_198.vtk\n","Processed mesh_431.ply to data_vtk/mesh_431.vtk\n","Processed mesh_096.ply to 
data_vtk/mesh_096.vtk\n","Processed mesh_512.ply to data_vtk/mesh_512.vtk\n","Processed mesh_044.ply to data_vtk/mesh_044.vtk\n","Processed mesh_175.ply to data_vtk/mesh_175.vtk\n","Processed mesh_495.ply to data_vtk/mesh_495.vtk\n","Processed mesh_275.ply to data_vtk/mesh_275.vtk\n","Processed mesh_054.ply to data_vtk/mesh_054.vtk\n","Processed mesh_269.ply to data_vtk/mesh_269.vtk\n","Processed mesh_146.ply to data_vtk/mesh_146.vtk\n","Processed mesh_561.ply to data_vtk/mesh_561.vtk\n","Processed mesh_338.ply to data_vtk/mesh_338.vtk\n","Processed mesh_297.ply to data_vtk/mesh_297.vtk\n","Processed mesh_565.ply to data_vtk/mesh_565.vtk\n","Processed mesh_352.ply to data_vtk/mesh_352.vtk\n","Processed mesh_126.ply to data_vtk/mesh_126.vtk\n","Processed mesh_468.ply to data_vtk/mesh_468.vtk\n","Processed mesh_051.ply to data_vtk/mesh_051.vtk\n","Processed mesh_591.ply to data_vtk/mesh_591.vtk\n","Processed mesh_372.ply to data_vtk/mesh_372.vtk\n","Processed mesh_268.ply to data_vtk/mesh_268.vtk\n","Processed mesh_248.ply to data_vtk/mesh_248.vtk\n","Processed mesh_397.ply to data_vtk/mesh_397.vtk\n","Processed mesh_539.ply to data_vtk/mesh_539.vtk\n","Processed mesh_259.ply to data_vtk/mesh_259.vtk\n","Processed mesh_598.ply to data_vtk/mesh_598.vtk\n","Processed mesh_331.ply to data_vtk/mesh_331.vtk\n","Processed mesh_600.ply to data_vtk/mesh_600.vtk\n","Processed mesh_221.ply to data_vtk/mesh_221.vtk\n","Processed mesh_646.ply to data_vtk/mesh_646.vtk\n","Processed mesh_417.ply to data_vtk/mesh_417.vtk\n","Processed mesh_457.ply to data_vtk/mesh_457.vtk\n","Processed mesh_195.ply to data_vtk/mesh_195.vtk\n","Processed mesh_333.ply to data_vtk/mesh_333.vtk\n","Processed mesh_070.ply to data_vtk/mesh_070.vtk\n","Processed mesh_622.ply to data_vtk/mesh_622.vtk\n","Processed mesh_639.ply to data_vtk/mesh_639.vtk\n","Processed mesh_232.ply to data_vtk/mesh_232.vtk\n","Processed mesh_649.ply to data_vtk/mesh_649.vtk\n","Processed mesh_574.ply to 
data_vtk/mesh_574.vtk\n","Processed mesh_196.ply to data_vtk/mesh_196.vtk\n","Processed mesh_643.ply to data_vtk/mesh_643.vtk\n","Processed mesh_069.ply to data_vtk/mesh_069.vtk\n","Processed mesh_323.ply to data_vtk/mesh_323.vtk\n","Processed mesh_424.ply to data_vtk/mesh_424.vtk\n","Processed mesh_469.ply to data_vtk/mesh_469.vtk\n","Processed mesh_402.ply to data_vtk/mesh_402.vtk\n","Processed mesh_635.ply to data_vtk/mesh_635.vtk\n","Processed mesh_152.ply to data_vtk/mesh_152.vtk\n","Processed mesh_177.ply to data_vtk/mesh_177.vtk\n","Processed mesh_549.ply to data_vtk/mesh_549.vtk\n","Processed mesh_230.ply to data_vtk/mesh_230.vtk\n","Processed mesh_522.ply to data_vtk/mesh_522.vtk\n","Processed mesh_298.ply to data_vtk/mesh_298.vtk\n","Processed mesh_076.ply to data_vtk/mesh_076.vtk\n","Processed mesh_464.ply to data_vtk/mesh_464.vtk\n","Processed mesh_183.ply to data_vtk/mesh_183.vtk\n","Processed mesh_060.ply to data_vtk/mesh_060.vtk\n","Processed mesh_577.ply to data_vtk/mesh_577.vtk\n","Processed mesh_113.ply to data_vtk/mesh_113.vtk\n","Processed mesh_029.ply to data_vtk/mesh_029.vtk\n","Processed mesh_063.ply to data_vtk/mesh_063.vtk\n","Processed mesh_644.ply to data_vtk/mesh_644.vtk\n","Processed mesh_487.ply to data_vtk/mesh_487.vtk\n","Processed mesh_493.ply to data_vtk/mesh_493.vtk\n","Processed mesh_276.ply to data_vtk/mesh_276.vtk\n","Processed mesh_450.ply to data_vtk/mesh_450.vtk\n","Processed mesh_497.ply to data_vtk/mesh_497.vtk\n","Processed mesh_030.ply to data_vtk/mesh_030.vtk\n","Processed mesh_523.ply to data_vtk/mesh_523.vtk\n","Processed mesh_065.ply to data_vtk/mesh_065.vtk\n","Processed mesh_562.ply to data_vtk/mesh_562.vtk\n","Processed mesh_488.ply to data_vtk/mesh_488.vtk\n","Processed mesh_138.ply to data_vtk/mesh_138.vtk\n","Processed mesh_257.ply to data_vtk/mesh_257.vtk\n","Processed mesh_083.ply to data_vtk/mesh_083.vtk\n","Processed mesh_059.ply to data_vtk/mesh_059.vtk\n","Processed mesh_315.ply to 
data_vtk/mesh_315.vtk\n","Processed mesh_010.ply to data_vtk/mesh_010.vtk\n","Processed mesh_191.ply to data_vtk/mesh_191.vtk\n","Processed mesh_625.ply to data_vtk/mesh_625.vtk\n","Processed mesh_553.ply to data_vtk/mesh_553.vtk\n","Processed mesh_227.ply to data_vtk/mesh_227.vtk\n","Processed mesh_002.ply to data_vtk/mesh_002.vtk\n","Processed mesh_028.ply to data_vtk/mesh_028.vtk\n","Processed mesh_415.ply to data_vtk/mesh_415.vtk\n","Processed mesh_186.ply to data_vtk/mesh_186.vtk\n","Processed mesh_480.ply to data_vtk/mesh_480.vtk\n","Processed mesh_006.ply to data_vtk/mesh_006.vtk\n","Processed mesh_404.ply to data_vtk/mesh_404.vtk\n","Processed mesh_180.ply to data_vtk/mesh_180.vtk\n","Processed mesh_045.ply to data_vtk/mesh_045.vtk\n","Processed mesh_241.ply to data_vtk/mesh_241.vtk\n","Processed mesh_260.ply to data_vtk/mesh_260.vtk\n","Processed mesh_202.ply to data_vtk/mesh_202.vtk\n","Processed mesh_073.ply to data_vtk/mesh_073.vtk\n","Processed mesh_335.ply to data_vtk/mesh_335.vtk\n","Processed mesh_375.ply to data_vtk/mesh_375.vtk\n","Processed mesh_623.ply to data_vtk/mesh_623.vtk\n","Processed mesh_149.ply to data_vtk/mesh_149.vtk\n","Processed mesh_327.ply to data_vtk/mesh_327.vtk\n","Processed mesh_124.ply to data_vtk/mesh_124.vtk\n","Processed mesh_125.ply to data_vtk/mesh_125.vtk\n","Processed mesh_446.ply to data_vtk/mesh_446.vtk\n","Processed mesh_283.ply to data_vtk/mesh_283.vtk\n","Processed mesh_332.ply to data_vtk/mesh_332.vtk\n","Processed mesh_529.ply to data_vtk/mesh_529.vtk\n","Processed mesh_568.ply to data_vtk/mesh_568.vtk\n","Processed mesh_437.ply to data_vtk/mesh_437.vtk\n","Processed mesh_453.ply to data_vtk/mesh_453.vtk\n","Processed mesh_513.ply to data_vtk/mesh_513.vtk\n","Processed mesh_142.ply to data_vtk/mesh_142.vtk\n","Processed mesh_203.ply to data_vtk/mesh_203.vtk\n","Processed mesh_538.ply to data_vtk/mesh_538.vtk\n","Processed mesh_116.ply to data_vtk/mesh_116.vtk\n","Processed mesh_130.ply to 
import os
import vtk
import numpy as np
import open3d as o3d


def read_ply(file_path):
    """Read a PLY file and return (vertices, triangles) as numpy arrays."""
    mesh = o3d.io.read_triangle_mesh(file_path)
    vertices = np.asarray(mesh.vertices)
    triangles = np.asarray(mesh.triangles)
    return vertices, triangles


def load_pressure(pressure_file_path):
    """Load a pressure .npy file as a flat float32 vector.

    NOTE: indices 16..111 are deliberately dropped so the pressure vector
    matches the reduced 3586-node meshes used downstream (the track_A
    conversion applies the same 3682 -> 3586 vertex reduction).
    """
    press = np.load(pressure_file_path).reshape((-1,)).astype(np.float32)
    press = np.concatenate((press[0:16], press[112:]), axis=0)
    return press


def write_vtk(vertices, triangles, npy_data, output_path):
    """Write a triangle mesh plus a per-point "Pressure" array to a legacy VTK file.

    ``npy_data`` must have one value per vertex.
    """
    # Point coordinates
    points = vtk.vtkPoints()
    for vertex in vertices:
        points.InsertNextPoint(vertex)

    # Triangle connectivity
    cells = vtk.vtkCellArray()
    for triangle in triangles:
        cells.InsertNextCell(3)
        cells.InsertCellPoint(triangle[0])
        cells.InsertCellPoint(triangle[1])
        cells.InsertCellPoint(triangle[2])

    unstructured_grid = vtk.vtkUnstructuredGrid()
    unstructured_grid.SetPoints(points)
    unstructured_grid.SetCells(vtk.VTK_TRIANGLE, cells)

    # Attach the pressure values as a named point-data array
    pressure = vtk.vtkFloatArray()
    pressure.SetName("Pressure")
    for value in npy_data:
        pressure.InsertNextValue(value)
    unstructured_grid.GetPointData().AddArray(pressure)

    writer = vtk.vtkUnstructuredGridWriter()
    writer.SetFileName(output_path)
    writer.SetInputData(unstructured_grid)
    writer.Write()


def read_vtk(file_path):
    """Debug helper: read a VTK file back and print its vertices, cells and pressure.

    Prints every point/cell, so only use on small meshes.
    """
    reader = vtk.vtkUnstructuredGridReader()
    reader.SetFileName(file_path)
    reader.Update()

    unstructured_grid = reader.GetOutput()
    if not unstructured_grid:
        print(f"Failed to read the VTK file: {file_path}")
        return

    points = unstructured_grid.GetPoints()
    if not points:
        print(f"No points found in the VTK file: {file_path}")
        return

    vertices = np.array([points.GetPoint(i) for i in range(points.GetNumberOfPoints())])
    print(f"Number of vertices: {vertices.shape[0]}")
    print("Vertices (x, y, z):")
    for i, vertex in enumerate(vertices):
        print(f"Vertex {i}: {vertex}")

    cells = unstructured_grid.GetCells()
    if not cells:
        print(f"No cells found in the VTK file: {file_path}")
        return

    cells.InitTraversal()
    id_list = vtk.vtkIdList()
    cell_indices = []
    while cells.GetNextCell(id_list):
        ids = [id_list.GetId(j) for j in range(id_list.GetNumberOfIds())]
        cell_indices.append(ids)

    print(f"\nNumber of cells: {len(cell_indices)}")
    print("Cells (vertex indices):")
    for i, cell in enumerate(cell_indices):
        v_indices = [vertices[idx] for idx in cell]
        print(f"Cell {i}: {cell} -> Vertices: {v_indices}")

    pressure = unstructured_grid.GetPointData().GetArray("Pressure")
    if pressure:
        print("\nPressure data:")
        for i in range(pressure.GetNumberOfTuples()):
            print(f"Point {i}: Pressure = {pressure.GetValue(i)}")
    else:
        print("\nNo pressure data found")


def process_directory(input_dir, output_dir):
    """Convert every mesh_XXX.ply in input_dir with a matching press_XXX.npy
    into output_dir/mesh_XXX.vtk."""
    # exist_ok avoids the check-then-create race of the original version
    os.makedirs(output_dir, exist_ok=True)

    # sorted() makes the processing (and log) order deterministic
    for file_name in sorted(os.listdir(input_dir)):
        if file_name.endswith(".ply"):
            ply_file_path = os.path.join(input_dir, file_name)
            mesh_index = file_name.replace("mesh_", "").replace(".ply", "")
            npy_file_name = f"press_{mesh_index}.npy"
            npy_file_path = os.path.join(input_dir, npy_file_name)
            vtk_file_path = os.path.join(output_dir, file_name.replace(".ply", ".vtk"))

            if os.path.exists(npy_file_path):
                vertices, triangles = read_ply(ply_file_path)
                npy_data = load_pressure(npy_file_path)
                write_vtk(vertices, triangles, npy_data, vtk_file_path)
                print(f"Processed {file_name} to {vtk_file_path}")
            else:
                print(f"Pressure file for {file_name} not found.")


# Input and output directories
input_directory = "data"
output_directory = "data_vtk"

# Convert every file in the input directory
process_directory(input_directory, output_directory)
zeng","userId":"06013928868849686113"},"user_tz":-480},"id":"oD2xmLDL-Yu3","outputId":"4267d74f-5ee0-4dd0-a6fd-46f9d86a7f07"},"outputs":[{"name":"stdout","output_type":"stream","text":["Processed mesh_658.ply to track_A_vtk/mesh_658.vtk with 3586 vertices\n","Processed mesh_659.ply to track_A_vtk/mesh_659.vtk with 3586 vertices\n","Processed mesh_660.ply to track_A_vtk/mesh_660.vtk with 3586 vertices\n","Processed mesh_662.ply to track_A_vtk/mesh_662.vtk with 3586 vertices\n","Processed mesh_663.ply to track_A_vtk/mesh_663.vtk with 3586 vertices\n","Processed mesh_664.ply to track_A_vtk/mesh_664.vtk with 3586 vertices\n","Processed mesh_665.ply to track_A_vtk/mesh_665.vtk with 3586 vertices\n","Processed mesh_666.ply to track_A_vtk/mesh_666.vtk with 3586 vertices\n","Processed mesh_667.ply to track_A_vtk/mesh_667.vtk with 3586 vertices\n","Processed mesh_668.ply to track_A_vtk/mesh_668.vtk with 3586 vertices\n","Processed mesh_672.ply to track_A_vtk/mesh_672.vtk with 3586 vertices\n","Processed mesh_673.ply to track_A_vtk/mesh_673.vtk with 3586 vertices\n","Processed mesh_674.ply to track_A_vtk/mesh_674.vtk with 3586 vertices\n","Processed mesh_675.ply to track_A_vtk/mesh_675.vtk with 3586 vertices\n","Processed mesh_676.ply to track_A_vtk/mesh_676.vtk with 3586 vertices\n","Processed mesh_677.ply to track_A_vtk/mesh_677.vtk with 3586 vertices\n","Processed mesh_678.ply to track_A_vtk/mesh_678.vtk with 3586 vertices\n","Processed mesh_679.ply to track_A_vtk/mesh_679.vtk with 3586 vertices\n","Processed mesh_681.ply to track_A_vtk/mesh_681.vtk with 3586 vertices\n","Processed mesh_683.ply to track_A_vtk/mesh_683.vtk with 3586 vertices\n","Processed mesh_684.ply to track_A_vtk/mesh_684.vtk with 3586 vertices\n","Processed mesh_686.ply to track_A_vtk/mesh_686.vtk with 3586 vertices\n","Processed mesh_687.ply to track_A_vtk/mesh_687.vtk with 3586 vertices\n","Processed mesh_688.ply to track_A_vtk/mesh_688.vtk with 3586 vertices\n","Processed mesh_689.ply to 
import os
import vtk
import numpy as np
import open3d as o3d


def read_ply(file_path):
    """Read a PLY file and return (vertices, triangles) as numpy arrays."""
    mesh = o3d.io.read_triangle_mesh(file_path)
    vertices = np.asarray(mesh.vertices)
    triangles = np.asarray(mesh.triangles)
    return vertices, triangles


def adjust_vertices_and_triangles(vertices):
    """Reduce a 3682-vertex mesh to 3586 vertices by dropping indices 16..111.

    Returns (adjusted_vertices, index_map) where index_map maps an old vertex
    index to its new index.  Meshes with any other vertex count are returned
    unchanged with index_map None.
    """
    if vertices.shape[0] == 3682:
        retained_indices = list(range(16)) + list(range(112, 3682))
        adjusted_vertices = vertices[retained_indices]
        index_map = {old_idx: new_idx for new_idx, old_idx in enumerate(retained_indices)}
        return adjusted_vertices, index_map
    return vertices, None


def adjust_triangles(triangles, index_map):
    """Remap triangle vertex indices through index_map.

    Triangles that reference a removed vertex are dropped entirely.
    With index_map None the triangles are returned unchanged.
    """
    if index_map is None:
        return triangles
    adjusted_triangles = [
        [index_map[idx] for idx in triangle]
        for triangle in triangles
        if all(idx in index_map for idx in triangle)
    ]
    return np.array(adjusted_triangles)


def write_vtk(vertices, triangles, output_path):
    """Write a triangle mesh (no point data) to a legacy VTK file."""
    points = vtk.vtkPoints()
    for vertex in vertices:
        points.InsertNextPoint(vertex)

    cells = vtk.vtkCellArray()
    for triangle in triangles:
        cells.InsertNextCell(3)
        cells.InsertCellPoint(triangle[0])
        cells.InsertCellPoint(triangle[1])
        cells.InsertCellPoint(triangle[2])

    unstructured_grid = vtk.vtkUnstructuredGrid()
    unstructured_grid.SetPoints(points)
    unstructured_grid.SetCells(vtk.VTK_TRIANGLE, cells)

    writer = vtk.vtkUnstructuredGridWriter()
    writer.SetFileName(output_path)
    writer.SetInputData(unstructured_grid)
    writer.Write()


def process_directory(input_dir, output_dir, file_list):
    """Convert mesh_<id>.ply for every id in file_list to output_dir/mesh_<id>.vtk,
    applying the 3682 -> 3586 vertex reduction where it applies."""
    # exist_ok avoids the check-then-create race of the original version
    os.makedirs(output_dir, exist_ok=True)

    for file_id in file_list:
        ply_file_name = f"mesh_{file_id}.ply"
        ply_file_path = os.path.join(input_dir, ply_file_name)
        vtk_file_name = f"mesh_{file_id}.vtk"
        vtk_file_path = os.path.join(output_dir, vtk_file_name)

        if os.path.exists(ply_file_path):
            vertices, triangles = read_ply(ply_file_path)
            vertices, index_map = adjust_vertices_and_triangles(vertices)
            triangles = adjust_triangles(triangles, index_map)
            write_vtk(vertices, triangles, vtk_file_path)
            print(f"Processed {ply_file_name} to {vtk_file_path} with {vertices.shape[0]} vertices")
        else:
            print(f"{ply_file_name} not found in {input_dir}")


def read_file_list(file_path):
    """Read mesh ids, one per line.

    Blank lines (including a trailing newline) are skipped; the original
    version turned them into empty ids and spurious "mesh_.ply not found"
    messages.
    """
    with open(file_path, 'r') as f:
        return [line.strip() for line in f if line.strip()]


# Input and output directories
input_directory = "track_A"
output_directory = "track_A_vtk"
file_list_path = "track_A/watertight_meshes.txt"

# Read the list of mesh ids to convert
file_list = read_file_list(file_list_path)

# Convert every listed mesh
process_directory(input_directory, output_directory, file_list)
import os
import vtk
import numpy as np


def load_centroid(file_path):
    """Load an (N, 3) float32 centroid array from a .npy file."""
    centroid = np.load(file_path).reshape((-1, 3)).astype(np.float32)
    return centroid


def load_pressure(file_path):
    """Load a flat float32 pressure array from a .npy file."""
    press = np.load(file_path).reshape((-1,)).astype(np.float32)
    return press


def write_vtk(vertices, pressure_data, output_path):
    """Write a point cloud (one VTK_VERTEX cell per point) with a per-point
    "Pressure" array to a legacy VTK file."""
    # Point coordinates
    points = vtk.vtkPoints()
    for vertex in vertices:
        points.InsertNextPoint(vertex)

    # One vertex cell per point so viewers render the cloud
    cells = vtk.vtkCellArray()
    for idx in range(len(vertices)):
        cells.InsertNextCell(1)
        cells.InsertCellPoint(idx)

    unstructured_grid = vtk.vtkUnstructuredGrid()
    unstructured_grid.SetPoints(points)
    unstructured_grid.SetCells(vtk.VTK_VERTEX, cells)

    # Attach the pressure values as a named point-data array
    pressure = vtk.vtkFloatArray()
    pressure.SetName("Pressure")
    for value in pressure_data:
        pressure.InsertNextValue(value)
    unstructured_grid.GetPointData().AddArray(pressure)

    writer = vtk.vtkUnstructuredGridWriter()
    writer.SetFileName(output_path)
    writer.SetInputData(unstructured_grid)
    writer.Write()


def process_directory(input_centroid_dir, input_pressure_dir, output_dir):
    """Pair every centroid_<id>.npy with press_<id>.npy and write mesh_<id>.vtk.

    When the two arrays disagree in length, the longer one is trimmed (with a
    warning) so the VTK file stays consistent.
    """
    # exist_ok avoids the check-then-create race of the original version
    os.makedirs(output_dir, exist_ok=True)

    # sorted() makes the processing (and log) order deterministic
    for file_name in sorted(os.listdir(input_centroid_dir)):
        if file_name.endswith(".npy") and file_name.startswith("centroid_"):
            mesh_index = file_name.replace("centroid_", "").replace(".npy", "")
            centroid_file_path = os.path.join(input_centroid_dir, file_name)
            pressure_file_name = f"press_{mesh_index}.npy"
            pressure_file_path = os.path.join(input_pressure_dir, pressure_file_name)
            vtk_file_path = os.path.join(output_dir, f"mesh_{mesh_index}.vtk")

            if os.path.exists(pressure_file_path):
                vertices = load_centroid(centroid_file_path)
                pressure_data = load_pressure(pressure_file_path)

                # Keep point count and pressure count in sync
                num_vertices = vertices.shape[0]
                num_pressure = pressure_data.shape[0]
                if num_pressure > num_vertices:
                    print(f"Warning: Pressure data for {file_name} is larger than the number of points. Trimming extra data.")
                    pressure_data = pressure_data[:num_vertices]
                elif num_pressure < num_vertices:
                    print(f"Warning: Pressure data for {file_name} is smaller than the number of points. Trimming extra points.")
                    vertices = vertices[:num_pressure]

                write_vtk(vertices, pressure_data, vtk_file_path)
                print(f"Processed {file_name} to {vtk_file_path}")
            else:
                print(f"Pressure file for {file_name} not found.")


# Input and output directories
input_centroid_directory = "data_track_B"
input_pressure_directory = "data_track_B"
output_directory = "data_centroid_track_B_vtk"

# Convert every file in the input directory
process_directory(input_centroid_directory, input_pressure_directory, output_directory)
   writer.SetInputData(unstructured_grid)\n","    writer.Write()\n","\n","def process_directory(input_centroid_dir, output_dir):\n","    if not os.path.exists(output_dir):\n","        os.makedirs(output_dir)\n","\n","    for file_name in os.listdir(input_centroid_dir):\n","        if file_name.endswith(\".npy\") and file_name.startswith(\"centroid_\"):\n","            mesh_index = file_name.replace(\"centroid_\", \"\").replace(\".npy\", \"\")\n","            centroid_file_path = os.path.join(input_centroid_dir, file_name)\n","            vtk_file_path = os.path.join(output_dir, f\"mesh_{mesh_index}.vtk\")\n","\n","            vertices = load_centroid(centroid_file_path)\n","            write_vtk(vertices, vtk_file_path)\n","            print(f\"Processed {file_name} to {vtk_file_path}\")\n","\n","# 设置输入和输出目录\n","input_centroid_directory = \"track_B\"\n","output_directory = \"track_B_vtk\"\n","\n","# 处理目录下的所有文件\n","process_directory(input_centroid_directory, output_directory)\n"]},{"cell_type":"markdown","metadata":{"id":"fGW5RaXnG6VO"},"source":["数据预处理"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"R1ZbVuVo-se9"},"outputs":[],"source":["import vtk\n","from scipy.spatial import ConvexHull\n","import torch\n","import os\n","import itertools\n","import random\n","import numpy as np\n","from torch_geometric import nn as nng\n","from sklearn.neighbors import NearestNeighbors\n","from torch_geometric.data import Data, Dataset\n","from torch_geometric.utils import k_hop_subgraph, subgraph\n","from vtk.util.numpy_support import vtk_to_numpy\n","from tqdm import tqdm\n","\n","\n","def load_unstructured_grid_data(file_name):\n","    reader = vtk.vtkUnstructuredGridReader()\n","    reader.SetFileName(file_name)\n","    reader.Update()\n","    output = reader.GetOutput()\n","    return output\n","\n","\n","def unstructured_grid_data_to_poly_data(unstructured_grid_data):\n","    filter = vtk.vtkDataSetSurfaceFilter()\n","    
filter.SetInputData(unstructured_grid_data)\n","    filter.Update()\n","    poly_data = filter.GetOutput()\n","    return poly_data, filter\n","\n","\n","def get_sdf(target, boundary):\n","    nbrs = NearestNeighbors(n_neighbors=1).fit(boundary)\n","    dists, indices = nbrs.kneighbors(target)\n","    neis = np.array([boundary[i[0]] for i in indices])\n","    dirs = (target - neis) / (dists + 1e-8)\n","    return dists.reshape(-1), dirs\n","\n","\n","def get_normal(unstructured_grid_data):\n","    poly_data, surface_filter = unstructured_grid_data_to_poly_data(unstructured_grid_data)\n","    normal_filter = vtk.vtkPolyDataNormals()\n","    normal_filter.SetInputData(poly_data)\n","    normal_filter.SetAutoOrientNormals(1)\n","    normal_filter.SetConsistency(1)\n","    normal_filter.SetComputeCellNormals(1)\n","    normal_filter.SetComputePointNormals(0)\n","    normal_filter.Update()\n","\n","    unstructured_grid_data.GetCellData().SetNormals(normal_filter.GetOutput().GetCellData().GetNormals())\n","    c2p = vtk.vtkCellDataToPointData()\n","    c2p.SetInputData(unstructured_grid_data)\n","    c2p.Update()\n","    unstructured_grid_data = c2p.GetOutput()\n","    normal = vtk_to_numpy(c2p.GetOutput().GetPointData().GetNormals()).astype(np.double)\n","    normal /= (np.max(np.abs(normal), axis=1, keepdims=True) + 1e-8)\n","    normal /= (np.linalg.norm(normal, axis=1, keepdims=True) + 1e-8)\n","    if np.isnan(normal).sum() > 0:\n","        print(np.isnan(normal).sum())\n","        print(\"recalculate\")\n","        return get_normal(unstructured_grid_data)  # re-calculate\n","    return normal\n","\n","\n","def visualize_poly_data(poly_data, surface_filter, normal_filter=None):\n","    if normal_filter is not None:\n","        mask = vtk.vtkMaskPoints()\n","        mask.SetInputData(normal_filter.GetOutput())\n","        mask.Update()\n","        arrow = vtk.vtkArrowSource()\n","        arrow.Update()\n","        glyph = vtk.vtkGlyph3D()\n","        
glyph.SetInputData(mask.GetOutput())\n","        glyph.SetSourceData(arrow.GetOutput())\n","        glyph.SetVectorModeToUseNormal()\n","        glyph.SetScaleFactor(0.1)\n","        glyph.Update()\n","        norm_mapper = vtk.vtkPolyDataMapper()\n","        norm_mapper.SetInputData(normal_filter.GetOutput())\n","        glyph_mapper = vtk.vtkPolyDataMapper()\n","        glyph_mapper.SetInputData(glyph.GetOutput())\n","        norm_actor = vtk.vtkActor()\n","        norm_actor.SetMapper(norm_mapper)\n","        glyph_actor = vtk.vtkActor()\n","        glyph_actor.SetMapper(glyph_mapper)\n","        glyph_actor.GetProperty().SetColor(1, 0, 0)\n","        norm_render = vtk.vtkRenderer()\n","        norm_render.AddActor(norm_actor)\n","        norm_render.SetBackground(0, 1, 0)\n","        glyph_render = vtk.vtkRenderer()\n","        glyph_render.AddActor(glyph_actor)\n","        glyph_render.AddActor(norm_actor)\n","        glyph_render.SetBackground(0, 0, 1)\n","\n","    scalar_range = poly_data.GetScalarRange()\n","\n","    mapper = vtk.vtkDataSetMapper()\n","    mapper.SetInputConnection(surface_filter.GetOutputPort())\n","    mapper.SetScalarRange(scalar_range)\n","\n","    actor = vtk.vtkActor()\n","    actor.SetMapper(mapper)\n","\n","    renderer = vtk.vtkRenderer()\n","    renderer.AddActor(actor)\n","    renderer.SetBackground(1, 1, 1)  # Set background to white\n","\n","    renderer_window = vtk.vtkRenderWindow()\n","    renderer_window.AddRenderer(renderer)\n","    if normal_filter is not None:\n","        renderer_window.AddRenderer(norm_render)\n","        renderer_window.AddRenderer(glyph_render)\n","    renderer_window.Render()\n","\n","    interactor = vtk.vtkRenderWindowInteractor()\n","    interactor.SetRenderWindow(renderer_window)\n","    interactor.Initialize()\n","    interactor.Start()\n","\n","def get_scalar_data(unstructured_grid, scalar_name):\n","    point_data = unstructured_grid.GetPointData()\n","    if point_data:\n","        
def acget_datalist(root, samples, norm=False, coef_norm=None, savedir=None, preprocessed=False):
    """Build torch_geometric ``Data`` graphs for track-A training samples.

    Parameters
    ----------
    root : str
        Directory holding the raw ``.vtk`` sample files.
    samples : list[str]
        File names (relative to ``root``) to process; missing files are skipped.
    norm : bool
        When True and ``coef_norm`` is None, compute dataset-wide mean/std,
        normalize ``x``/``y`` in place, and return ``(dataset, coef_norm)``.
    coef_norm : tuple or None
        ``(mean_in, std_in, mean_out, std_out)`` from a previous call; when
        given it is applied to the loaded data.
    savedir : str or None
        Cache directory; processed arrays are saved one folder per sample.
    preprocessed : bool
        When True, load cached ``.npy`` arrays from ``savedir`` instead of
        parsing the VTK files.

    Returns
    -------
    list[Data] or (list[Data], tuple)
        The dataset; plus the normalization coefficients when they were
        computed here.

    Raises
    ------
    ValueError
        If a VTK file contains neither a "Pressure" nor a "point_scalars"
        array (previously this left ``press`` as None and crashed later in
        ``np.c_`` with a cryptic error).
    """
    dataset = []
    mean_in, mean_out = 0, 0
    std_in, std_out = 0, 0
    for k, s in enumerate(tqdm(samples, desc="Processing samples")):
        if preprocessed and savedir is not None:
            save_path = os.path.join(savedir, s)
            if not os.path.exists(save_path):
                continue
            init = np.load(os.path.join(save_path, 'x.npy'))
            target = np.load(os.path.join(save_path, 'y.npy'))
            pos = np.load(os.path.join(save_path, 'pos.npy'))
            surf = np.load(os.path.join(save_path, 'surf.npy'))
            edge_index = np.load(os.path.join(save_path, 'edge_index.npy'))
        else:
            file_name_press = os.path.join(root, s)
            if not os.path.exists(file_name_press):
                continue

            unstructured_grid_data_press = load_unstructured_grid_data(file_name_press)

            # The pressure array name depends on how the VTK file was exported.
            press = None
            for scalar_name in ("Pressure", "point_scalars"):
                press = get_scalar_data(unstructured_grid_data_press, scalar_name)
                if press is not None:
                    break
            if press is None:
                # Fail loudly and early rather than propagating None downstream.
                raise ValueError(f"No pressure scalar found in {file_name_press}")

            points_press = vtk_to_numpy(unstructured_grid_data_press.GetPoints().GetData())
            edges_press = get_edges(unstructured_grid_data_press, points_press, cell_size=3)

            sdf_press = np.zeros(points_press.shape[0])
            normal_press = get_normal(unstructured_grid_data_press)

            pos_surf = points_press
            sdf_surf = sdf_press
            normal_surf = normal_press
            press_surf = press

            # Inputs: xyz position, sdf placeholder (zeros), surface normal.
            init_surf = np.c_[pos_surf, sdf_surf, normal_surf]
            # Targets: three zero velocity components followed by pressure.
            target_surf = np.c_[np.zeros((len(pos_surf), 3)), press_surf]

            surf = np.ones(len(pos_surf))
            pos = pos_surf
            init = init_surf
            target = target_surf
            # Both edge-list slots receive the same surface edges.
            edge_index = get_edge_index(pos, edges_press, edges_press)

            if savedir is not None:
                save_path = os.path.join(savedir, s)
                os.makedirs(save_path, exist_ok=True)
                np.save(os.path.join(save_path, 'x.npy'), init)
                np.save(os.path.join(save_path, 'y.npy'), target)
                np.save(os.path.join(save_path, 'pos.npy'), pos)
                np.save(os.path.join(save_path, 'surf.npy'), surf)
                np.save(os.path.join(save_path, 'edge_index.npy'), edge_index)

        surf = torch.tensor(surf)
        pos = torch.tensor(pos)
        x = torch.tensor(init)
        y = torch.tensor(target)
        edge_index = torch.tensor(edge_index)

        if norm and coef_norm is None:
            # Streaming running mean over all nodes of all processed samples.
            if k == 0:
                old_length = init.shape[0]
                mean_in = init.mean(axis=0)
                mean_out = target.mean(axis=0)
            else:
                new_length = old_length + init.shape[0]
                mean_in += (init.sum(axis=0) - init.shape[0] * mean_in) / new_length
                mean_out += (target.sum(axis=0) - target.shape[0] * mean_out) / new_length
                old_length = new_length

        data = Data(pos=pos, x=x, y=y, surf=surf.bool(), edge_index=edge_index)
        dataset.append(data)

    if norm and coef_norm is None:
        # Second pass: streaming variance around the final means.
        for k, data in enumerate(dataset):
            n_k = data.x.numpy().shape[0]
            if k == 0:
                old_length = n_k
                std_in = ((data.x.numpy() - mean_in) ** 2).sum(axis=0) / old_length
                std_out = ((data.y.numpy() - mean_out) ** 2).sum(axis=0) / old_length
            else:
                new_length = old_length + n_k
                std_in += (((data.x.numpy() - mean_in) ** 2).sum(axis=0) - n_k * std_in) / new_length
                std_out += (((data.y.numpy() - mean_out) ** 2).sum(axis=0) - n_k * std_out) / new_length
                old_length = new_length

        std_in = np.sqrt(std_in)
        std_out = np.sqrt(std_out)

        for data in dataset:
            data.x = ((data.x - mean_in) / (std_in + 1e-8)).float()
            data.y = ((data.y - mean_out) / (std_out + 1e-8)).float()

        coef_norm = (mean_in, std_in, mean_out, std_out)
        dataset = (dataset, coef_norm)

    elif coef_norm is not None:
        for data in dataset:
            data.x = ((data.x - coef_norm[0]) / (coef_norm[1] + 1e-8)).float()
            data.y = ((data.y - coef_norm[2]) / (coef_norm[3] + 1e-8)).float()

    return dataset
def ac_get_datalist_for_prediction(root, samples, norm=False, coef_norm=None, savedir=None, preprocessed=False):
    """Build track-A graphs for inference: x/pos/surf/edge_index only, y is zeros.

    Mirrors ``acget_datalist`` but never reads pressure targets. When ``norm``
    is True and ``coef_norm`` is None the inputs are standardized with
    statistics computed over these samples (note: the resulting
    ``(mean_in, std_in)`` pair stays local and is not returned).
    """
    dataset = []
    mean_in, std_in = 0, 0
    for k, s in enumerate(tqdm(samples, desc="Processing samples")):
        if preprocessed and savedir is not None:
            save_path = os.path.join(savedir, s)
            if not os.path.exists(save_path):
                continue
            init = np.load(os.path.join(save_path, 'x.npy'))
            pos = np.load(os.path.join(save_path, 'pos.npy'))
            surf = np.load(os.path.join(save_path, 'surf.npy'))
            edge_index = np.load(os.path.join(save_path, 'edge_index.npy'))
        else:
            file_name = os.path.join(root, s)
            if not os.path.exists(file_name):
                continue

            grid = load_unstructured_grid_data(file_name)
            points = vtk_to_numpy(grid.GetPoints().GetData())
            mesh_edges = get_edges(grid, points, cell_size=3)
            normals = get_normal(grid)

            # Inputs: xyz position, sdf placeholder (zeros), surface normal.
            init = np.c_[points, np.zeros(points.shape[0]), normals]
            pos = points
            surf = np.ones(len(points))
            # Both edge-list slots receive the same surface edges.
            edge_index = get_edge_index(pos, mesh_edges, mesh_edges)

            if savedir is not None:
                save_path = os.path.join(savedir, s)
                os.makedirs(save_path, exist_ok=True)
                np.save(os.path.join(save_path, 'x.npy'), init)
                np.save(os.path.join(save_path, 'pos.npy'), pos)
                np.save(os.path.join(save_path, 'surf.npy'), surf)
                np.save(os.path.join(save_path, 'edge_index.npy'), edge_index)

        surf = torch.tensor(surf)
        pos = torch.tensor(pos)
        x = torch.tensor(init)
        y = torch.zeros((x.shape[0], 4))  # placeholder targets for the prediction API
        edge_index = torch.tensor(edge_index)

        if norm and coef_norm is None:
            # Streaming running mean of the input features over all nodes.
            if k == 0:
                old_length = init.shape[0]
                mean_in = init.mean(axis=0)
            else:
                new_length = old_length + init.shape[0]
                mean_in += (init.sum(axis=0) - init.shape[0] * mean_in) / new_length
                old_length = new_length

        dataset.append(Data(pos=pos, x=x, y=y, surf=surf.bool(), edge_index=edge_index))

    if norm and coef_norm is None:
        # Second pass: streaming variance around the final mean.
        for k, data in enumerate(dataset):
            n_k = data.x.numpy().shape[0]
            if k == 0:
                old_length = n_k
                std_in = ((data.x.numpy() - mean_in) ** 2).sum(axis=0) / old_length
            else:
                new_length = old_length + n_k
                std_in += (((data.x.numpy() - mean_in) ** 2).sum(axis=0) - n_k * std_in) / new_length
                old_length = new_length

        std_in = np.sqrt(std_in)
        for data in dataset:
            data.x = ((data.x - mean_in) / (std_in + 1e-8)).float()
        coef_norm = (mean_in, std_in)

    elif coef_norm is not None:
        for data in dataset:
            data.x = ((data.x - coef_norm[0]) / (coef_norm[1] + 1e-8)).float()

    return dataset
          if k == 0:\n","                old_length = init.shape[0]\n","                mean_in = init.mean(axis=0)\n","            else:\n","                new_length = old_length + init.shape[0]\n","                mean_in += (init.sum(axis=0) - init.shape[0] * mean_in) / new_length\n","                old_length = new_length\n","        data = Data(pos=pos, x=x, y=y, surf=surf.bool(), edge_index=edge_index)\n","\n","        dataset.append(data)\n","\n","    if norm and coef_norm is None:\n","        for k, data in enumerate(dataset):\n","            if k == 0:\n","                old_length = data.x.numpy().shape[0]\n","                std_in = ((data.x.numpy() - mean_in) ** 2).sum(axis=0) / old_length\n","            else:\n","                new_length = old_length + data.x.numpy().shape[0]\n","                std_in += (((data.x.numpy() - mean_in) ** 2).sum(axis=0) - data.x.numpy().shape[0] * std_in) / new_length\n","                old_length = new_length\n","\n","        std_in = np.sqrt(std_in)\n","\n","        for data in dataset:\n","            data.x = ((data.x - mean_in) / (std_in + 1e-8)).float()\n","\n","        coef_norm = (mean_in, std_in)\n","\n","    elif coef_norm is not None:\n","        for data in dataset:\n","            data.x = ((data.x - coef_norm[0]) / (coef_norm[1] + 1e-8)).float()\n","\n","    return dataset\n","\n","\n","def bcget_datalist(root, samples, norm=False, coef_norm=None, savedir=None, preprocessed=False):\n","    dataset = []\n","    mean_in, mean_out = 0, 0\n","    std_in, std_out = 0, 0\n","    for k, s in enumerate(tqdm(samples, desc=\"Processing samples\")):\n","        if preprocessed and savedir is not None:\n","            save_path = os.path.join(savedir, s)\n","            if not os.path.exists(save_path):\n","                continue\n","            init = np.load(os.path.join(save_path, 'x.npy'))\n","            target = np.load(os.path.join(save_path, 'y.npy'))\n","            pos = 
np.load(os.path.join(save_path, 'pos.npy'))\n","            surf = np.load(os.path.join(save_path, 'surf.npy'))\n","            area = np.load(os.path.join(save_path, 'area.npy'))\n","        else:\n","            file_name_press = os.path.join(root, s)\n","\n","            if not os.path.exists(file_name_press):\n","                continue\n","\n","            unstructured_grid_data_press = load_unstructured_grid_data(file_name_press)\n","\n","            scalar_names = [\"Pressure\", \"point_scalars\"]  # 包含可能的标量数据名称\n","            for scalar_name in scalar_names:\n","                press = get_scalar_data(unstructured_grid_data_press, scalar_name)\n","                if press is not None:\n","                    break\n","\n","            points_press = vtk_to_numpy(unstructured_grid_data_press.GetPoints().GetData())\n","\n","            sdf_press = np.zeros(points_press.shape[0])\n","            pos_surf = points_press\n","            sdf_surf = sdf_press\n","            press_surf = press\n","\n","            # 获取当前文件的编号\n","            mesh_number = s[-8:-4]\n","\n","            # 从 data_track_B 目录读取对应的 area 文件\n","            area_file_name = os.path.join(\"data_track_B\", f\"area_{mesh_number}.npy\")\n","\n","            if os.path.exists(area_file_name):\n","                area = np.load(area_file_name)\n","            else:\n","                area = np.zeros(len(pos_surf))  # 如果没有 area 数据，则使用零填充\n","\n","            # 加入 30m/s 的信息，作为单独一列，维度跟节点数一样\n","            info = np.full((len(pos_surf), 1), 30.0)\n","\n","            # 将 pos_surf, sdf_surf, area 和 info 合并到 init_surf\n","            init_surf = np.c_[pos_surf, sdf_surf, area, info]\n","\n","\n","            target_surf = np.c_[np.zeros((len(pos_surf), 3)), press_surf]\n","\n","            surf = np.ones(len(pos_surf))\n","            pos = pos_surf\n","            init = init_surf\n","            target = target_surf\n","\n","            if savedir is not None:\n","                save_path = 
os.path.join(savedir, s)\n","                if not os.path.exists(save_path):\n","                    os.makedirs(save_path)\n","                np.save(os.path.join(save_path, 'x.npy'), init)\n","                np.save(os.path.join(save_path, 'y.npy'), target)\n","                np.save(os.path.join(save_path, 'pos.npy'), pos)\n","                np.save(os.path.join(save_path, 'surf.npy'), surf)\n","                np.save(os.path.join(save_path, 'area.npy'), area)\n","\n","        surf = torch.tensor(surf)\n","        pos = torch.tensor(pos)\n","        x = torch.tensor(init)\n","        y = torch.tensor(target)\n","\n","        if norm and coef_norm is None:\n","            if k == 0:\n","                old_length = init.shape[0]\n","                mean_in = init.mean(axis=0)\n","                mean_out = target.mean(axis=0)\n","            else:\n","                new_length = old_length + init.shape[0]\n","                mean_in += (init.sum(axis=0) - init.shape[0] * mean_in) / new_length\n","                mean_out += (target.sum(axis=0) - init.shape[0] * mean_out) / new_length\n","                old_length = new_length\n","\n","        data = Data(pos=pos, x=x, y=y, surf=surf.bool())\n","        dataset.append(data)\n","\n","    if norm and coef_norm is None:\n","        for k, data in enumerate(dataset):\n","            if k == 0:\n","                old_length = data.x.numpy().shape[0]\n","                std_in = ((data.x.numpy() - mean_in) ** 2).sum(axis=0) / old_length\n","                std_out = ((data.y.numpy() - mean_out) ** 2).sum(axis=0) / old_length\n","            else:\n","                new_length = old_length + data.x.numpy().shape[0]\n","                std_in += (((data.x.numpy() - mean_in) ** 2).sum(axis=0) - data.x.numpy().shape[0] * std_in) / new_length\n","                std_out += (((data.y.numpy() - mean_out) ** 2).sum(axis=0) - data.x.numpy().shape[0] * std_out) / new_length\n","                old_length = 
def bc_get_datalist_for_prediction(root, samples, norm=False, coef_norm=None, savedir=None, preprocessed=False):
    """Build track-B graphs for inference: x/pos/surf only, y is zeros.

    Mirrors ``bcget_datalist`` but never reads pressure targets; per-node
    areas are loaded from ``track_B/area_<n>.npy``. When ``norm`` is True and
    ``coef_norm`` is None the inputs are standardized with statistics computed
    over these samples (the local ``(mean_in, std_in)`` pair is not returned).
    """
    dataset = []
    mean_in, std_in = 0, 0
    for k, s in enumerate(tqdm(samples, desc="Processing samples")):
        if preprocessed and savedir is not None:
            save_path = os.path.join(savedir, s)
            if not os.path.exists(save_path):
                continue
            init = np.load(os.path.join(save_path, 'x.npy'))
            pos = np.load(os.path.join(save_path, 'pos.npy'))
            surf = np.load(os.path.join(save_path, 'surf.npy'))
            area = np.load(os.path.join(save_path, 'area.npy'))
        else:
            file_name = os.path.join(root, s)
            if not os.path.exists(file_name):
                continue

            grid = load_unstructured_grid_data(file_name)
            points = vtk_to_numpy(grid.GetPoints().GetData())
            sdf = np.zeros(points.shape[0])

            pos_surf = points
            sdf_surf = sdf

            # Mesh id parsed from a name like "mesh_10.vtk" -> 10.
            mesh_number = int(s.split('_')[-1].split('.')[0])

            # Per-node area comes from the companion track-B directory.
            area_file_name = os.path.join("track_B", f"area_{mesh_number}.npy")
            if os.path.exists(area_file_name):
                area = np.load(area_file_name)
            else:
                area = np.zeros(len(pos_surf))  # zero-fill when no area data exists

            # Constant 30 m/s inflow speed, one column matching the node count.
            info = np.full((len(pos_surf), 1), 30.0)

            # Inputs: xyz, sdf placeholder, node area, inflow-speed column.
            init_surf = np.c_[pos_surf, sdf_surf, area, info]

            surf = np.ones(len(pos_surf))
            pos = pos_surf
            init = init_surf

            if savedir is not None:
                save_path = os.path.join(savedir, s)
                os.makedirs(save_path, exist_ok=True)
                np.save(os.path.join(save_path, 'x.npy'), init)
                np.save(os.path.join(save_path, 'pos.npy'), pos)
                np.save(os.path.join(save_path, 'surf.npy'), surf)
                np.save(os.path.join(save_path, 'area.npy'), area)

        surf = torch.tensor(surf)
        pos = torch.tensor(pos)
        x = torch.tensor(init)
        y = torch.zeros((x.shape[0], 4))  # placeholder targets for the prediction API

        if norm and coef_norm is None:
            # Streaming running mean of the input features over all nodes.
            if k == 0:
                old_length = init.shape[0]
                mean_in = init.mean(axis=0)
            else:
                new_length = old_length + init.shape[0]
                mean_in += (init.sum(axis=0) - init.shape[0] * mean_in) / new_length
                old_length = new_length

        dataset.append(Data(pos=pos, x=x, y=y, surf=surf.bool()))

    if norm and coef_norm is None:
        # Second pass: streaming variance around the final mean.
        for k, data in enumerate(dataset):
            n_k = data.x.numpy().shape[0]
            if k == 0:
                old_length = n_k
                std_in = ((data.x.numpy() - mean_in) ** 2).sum(axis=0) / old_length
            else:
                new_length = old_length + n_k
                std_in += (((data.x.numpy() - mean_in) ** 2).sum(axis=0) - n_k * std_in) / new_length
                old_length = new_length

        std_in = np.sqrt(std_in)
        for data in dataset:
            data.x = ((data.x - mean_in) / (std_in + 1e-8)).float()
        coef_norm = (mean_in, std_in)

    elif coef_norm is not None:
        for data in dataset:
            data.x = ((data.x - coef_norm[0]) / (coef_norm[1] + 1e-8)).float()

    return dataset
def get_edges(unstructured_grid_data, points, cell_size=4):
    """Collect cell edges (each direction stored explicitly) as coordinate tuples.

    Returns a two-list structure ``[sources, targets]`` where entry i of each
    list is the xyz tuple of one endpoint of edge i.
    """
    conn = vtk_to_numpy(unstructured_grid_data.GetCells().GetData()).reshape(-1, cell_size + 1)
    pairs = set()
    for cell in conn:
        # Column 0 is the vertex count; columns 1..cell_size are point ids.
        for a, b in itertools.product(range(1, cell_size + 1), repeat=2):
            pairs.add((cell[a], cell[b]))
            pairs.add((cell[b], cell[a]))
    sources, targets = [], []
    for u, v in pairs:
        sources.append(tuple(points[u]))
        targets.append(tuple(points[v]))
    return [sources, targets]


def get_edge_index(pos, edges_press, edges_velo):
    """Convert coordinate-pair edge lists into a (2, E) integer index array.

    Node ids are recovered by exact coordinate lookup against ``pos``;
    duplicate edges across the two input lists are merged via a set.
    """
    index_of = {tuple(p): i for i, p in enumerate(pos)}
    edge_set = set()
    for sources, targets in (edges_press, edges_velo):
        for a, b in zip(sources, targets):
            edge_set.add((index_of[a], index_of[b]))
    return np.array(list(edge_set)).T


def get_induced_graph(data, idx, num_hops):
    """Extract the relabeled ``num_hops``-neighborhood subgraph around node ``idx``."""
    subset, sub_edge_index, _, _ = k_hop_subgraph(
        node_idx=idx, num_hops=num_hops, edge_index=data.edge_index, relabel_nodes=True)
    return Data(x=data.x[subset], y=data.y[idx], edge_index=sub_edge_index)
  return Data(x=data.x[subset], y=data.y[idx], edge_index=sub_edge_index)\n","\n","\n","def pc_normalize(pc):\n","    centroid = torch.mean(pc, axis=0)\n","    pc = pc - centroid\n","    m = torch.max(torch.sqrt(torch.sum(pc ** 2, axis=1)))\n","    pc = pc / m\n","    return pc\n","\n","\n","def get_shape(data, max_n_point=8192, normalize=True, use_height=False):\n","    surf_indices = torch.where(data.surf)[0].tolist()\n","\n","    if len(surf_indices) > max_n_point:\n","        surf_indices = np.array(random.sample(range(len(surf_indices)), max_n_point))\n","\n","    shape_pc = data.pos[surf_indices].clone()\n","\n","    if normalize:\n","        shape_pc = pc_normalize(shape_pc)\n","\n","    if use_height:\n","        gravity_dim = 1\n","        height_array = shape_pc[:, gravity_dim:gravity_dim + 1] - shape_pc[:, gravity_dim:gravity_dim + 1].min()\n","        shape_pc = torch.cat((shape_pc, height_array), axis=1)\n","\n","    return shape_pc\n","\n","\n","def create_edge_index_radius(data, r, max_neighbors=32):\n","    if isinstance(data, list):\n","        print(\"Error: 'data' is a list, expected 'Data' object.\")\n","        print(\"Data content:\", data)\n","        return None\n","\n","    data.edge_index = nng.radius_graph(x=data.pos, r=r, loop=True, max_num_neighbors=max_neighbors)\n","    return data\n","\n","class GraphDataset(Dataset):\n","    def __init__(self, datalist, use_height=False, use_cfd_mesh=True, r=None):\n","        super().__init__()\n","        self.datalist = datalist\n","        self.use_height = use_height\n","        if not use_cfd_mesh:\n","            assert r is not None\n","            for i in range(len(self.datalist)):\n","                self.datalist[i] = create_edge_index_radius(self.datalist[i], r)\n","\n","    def len(self):\n","        return len(self.datalist)\n","\n","    def get(self, idx):\n","        data = self.datalist[idx]\n","        shape = get_shape(data, use_height=self.use_height)\n","        return 
def get_samples(root):
    """Return the ``.vtk`` file names directly under ``root`` (os.listdir order)."""
    return [f for f in os.listdir(root) if f.endswith('.vtk')]


def ac_load_train_val_fold(args, preprocessed):
    """Shuffle track-A samples, split them 450 train / 50 val, and load both sets.

    Returns ``(train_dataset, val_dataset, coef_norm)`` where ``coef_norm``
    holds the normalization statistics computed on the training split.
    """
    samples = get_samples(args.a_data_dir)
    total_samples = len(samples)  # kept for parity with the original code

    # In-place shuffle so the split is random.
    np.random.shuffle(samples)

    trainlst = samples[:450]
    vallst = samples[450:500]

    if preprocessed:
        print("use preprocessed data")
    print("loading data")
    train_dataset, coef_norm = acget_datalist(
        args.a_data_dir, trainlst, norm=True, savedir=args.save_dir, preprocessed=preprocessed)
    val_dataset = acget_datalist(
        args.a_data_dir, vallst, coef_norm=coef_norm, savedir=args.save_dir, preprocessed=preprocessed)
    print("load data finish")
    return train_dataset, val_dataset, coef_norm


def bc_load_train_val_fold(args, preprocessed):
    """Shuffle track-B samples, split by args.train_split/args.val_split, load both sets.

    Returns ``(train_dataset, val_dataset, coef_norm)`` where ``coef_norm``
    holds the normalization statistics computed on the training split.
    """
    samples = get_samples(args.b_data_dir)
    total_samples = len(samples)  # kept for parity with the original code

    # In-place shuffle so the split is random.
    np.random.shuffle(samples)

    trainlst = samples[:args.train_split]
    vallst = samples[args.train_split:args.val_split]

    if preprocessed:
        print("use preprocessed data")
    print("loading data")
    train_dataset, coef_norm = bcget_datalist(
        args.b_data_dir, trainlst, norm=True, savedir=args.save_dir, preprocessed=preprocessed)
    val_dataset = bcget_datalist(
        args.b_data_dir, vallst, coef_norm=coef_norm, savedir=args.save_dir, preprocessed=preprocessed)
    print("load data finish")
    return train_dataset, val_dataset, coef_norm
preprocessed=preprocessed)\n","    val_dataset = bcget_datalist(args.b_data_dir, vallst, coef_norm=coef_norm, savedir=args.save_dir,\n","                               preprocessed=preprocessed)\n","    print(\"load data finish\")\n","    return train_dataset, val_dataset, coef_norm\n","\n","\n","def ac_load_train_val_fold_file(args, preprocessed, coef_norm):\n","    samples = get_samples(args.a_test_data_dir)\n","    total_samples = len(samples)\n","\n","    # Shuffle the samples to ensure randomness\n","    np.random.shuffle(samples)\n","\n","    # Split into validation sets\n","    vallst = samples[:50]\n","\n","    if preprocessed:\n","        print(\"use preprocessed data\")\n","    print(\"loading data\")\n","    val_dataset = ac_get_datalist_for_prediction(args.a_test_data_dir, vallst, norm=True, savedir=args.save_dir, preprocessed=preprocessed, coef_norm=coef_norm)\n","    print(\"load data finish\")\n","    return val_dataset, vallst\n","\n","def bc_load_train_val_fold_file(args, preprocessed, coef_norm):\n","    samples = get_samples(args.b_test_data_dir)\n","    total_samples = len(samples)\n","\n","    # Shuffle the samples to ensure randomness\n","    np.random.shuffle(samples)\n","\n","    # Split into validation sets\n","    vallst = samples[:50]\n","\n","    if preprocessed:\n","        print(\"use preprocessed data\")\n","    print(\"loading data\")\n","    val_dataset = bc_get_datalist_for_prediction(args.b_test_data_dir, vallst, norm=True, savedir=args.save_dir, preprocessed=preprocessed, coef_norm=coef_norm)\n","    print(\"load data finish\")\n","    return val_dataset, vallst\n"]},{"cell_type":"markdown","metadata":{"id":"dvVWGkc3G1El"},"source":["模型"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"LeL-sPJX4DMO"},"outputs":[],"source":["import torch\n","import numpy as np\n","import torch.nn as nn\n","from timm.models.layers import trunc_normal_\n","from einops import rearrange, repeat\n","import sys\n","\n","ACTIVATION = 
# Map activation names to zero-argument module factories. Every entry must be
# callable with no arguments and return a fresh nn.Module, because consumers
# (e.g. MLP) look up ACTIVATION[name] and then call it as ``act()``.
# Bug fix: 'leaky_relu' previously stored an nn.LeakyReLU *instance* (not a
# class like every other entry), so ``act()`` invoked the module's forward
# with no input tensor and raised a TypeError; wrap it in a factory instead.
ACTIVATION = {'gelu': nn.GELU, 'tanh': nn.Tanh, 'sigmoid': nn.Sigmoid, 'relu': nn.ReLU,
              'leaky_relu': lambda: nn.LeakyReLU(0.1),
              'softplus': nn.Softplus, 'ELU': nn.ELU, 'silu': nn.SiLU}
(fx_mid == 0).sum().item()\n","        # print(f'Number of zero values in fx: {zero_values}')\n","        # sys.exit(1)\n","\n","        # if torch.isnan(fx_mid).any() or torch.isnan(x_mid).any():\n","        #     print(\"NaN detected after in_project_fx or in_project_x\")\n","        #     print(\"fx_mid:\", fx_mid)\n","        #     print(\"x_mid:\", x_mid)\n","        #     sys.exit(1)\n","        #\n","        # print(fx_mid)\n","        slice_weights = self.softmax(self.in_project_slice(x_mid) / self.temperature)  # B H N G\n","\n","        # if torch.isnan(slice_weights).any():\n","        #     print(\"NaN detected after in_project_slice\")\n","        #     print(\"x_mid:\", x_mid)\n","        #     print(\"slice_weights:\", slice_weights)\n","        #     sys.exit(1)\n","\n","        slice_norm = slice_weights.sum(2)  # B H G\n","        slice_token = torch.einsum(\"bhnc,bhng->bhgc\", fx_mid, slice_weights)\n","        slice_token = slice_token / ((slice_norm + 1e-5)[:, :, :, None].repeat(1, 1, 1, self.dim_head))\n","\n","        # if torch.isnan(slice_token).any():\n","        #     print(\"NaN detected after slicing\")\n","        #     print(\"fx_mid:\", fx_mid)\n","        #     print(\"slice_weights:\", slice_weights)\n","        #     print(\"slice_token:\", slice_token)\n","        #     sys.exit(1)\n","\n","        ### (2) Attention among slice tokens\n","        q_slice_token = self.to_q(slice_token)\n","        k_slice_token = self.to_k(slice_token)\n","        v_slice_token = self.to_v(slice_token)\n","\n","        # if torch.isnan(q_slice_token).any() or torch.isnan(k_slice_token).any() or torch.isnan(v_slice_token).any():\n","        #     print(\"NaN detected in q_slice_token, k_slice_token, or v_slice_token\")\n","        #     print(\"slice_token:\", slice_token)\n","        #     print(\"q_slice_token:\", q_slice_token)\n","        #     print(\"k_slice_token:\", k_slice_token)\n","        #     print(\"v_slice_token:\", 
class MLP(nn.Module):
    """Residual MLP: Linear+activation pre-layer, ``n_layers`` hidden blocks
    (with optional residual connections), and a linear output head.

    ``act`` is a key into the module-level ACTIVATION table; unknown keys
    raise NotImplementedError.
    """

    def __init__(self, n_input, n_hidden, n_output, n_layers=1, act='gelu', res=True):
        super(MLP, self).__init__()

        if act in ACTIVATION.keys():
            act = ACTIVATION[act]
        else:
            raise NotImplementedError
        self.n_input = n_input
        self.n_hidden = n_hidden
        self.n_output = n_output
        self.n_layers = n_layers
        self.res = res
        self.linear_pre = nn.Sequential(nn.Linear(n_input, n_hidden), act())
        self.linear_post = nn.Linear(n_hidden, n_output)
        self.linears = nn.ModuleList(
            [nn.Sequential(nn.Linear(n_hidden, n_hidden), act()) for _ in range(n_layers)])

    def forward(self, x):
        x = self.linear_pre(x)
        for layer in self.linears:
            # Residual connection around each hidden block when enabled.
            x = layer(x) + x if self.res else layer(x)
        return self.linear_post(x)


class Transolver_block(nn.Module):
    """Transformer encoder block: pre-norm physics attention + pre-norm MLP,
    each with a residual connection; the last layer projects to ``out_dim``."""

    def __init__(
            self,
            num_heads: int,
            hidden_dim: int,
            dropout: float,
            act='gelu',
            mlp_ratio=4,
            last_layer=False,
            out_dim=1,
            slice_num=32,
    ):
        super().__init__()
        self.last_layer = last_layer
        self.ln_1 = nn.LayerNorm(hidden_dim)
        self.Attn = Physics_Attention_1D(hidden_dim, heads=num_heads, dim_head=hidden_dim // num_heads,
                                         dropout=dropout, slice_num=slice_num)
        self.ln_2 = nn.LayerNorm(hidden_dim)
        self.mlp = MLP(hidden_dim, hidden_dim * mlp_ratio, hidden_dim, n_layers=0, res=False, act=act)
        if self.last_layer:
            # Final projection to the target dimensionality.
            self.ln_3 = nn.LayerNorm(hidden_dim)
            self.mlp2 = nn.Linear(hidden_dim, out_dim)

    def forward(self, fx):
        fx = fx + self.Attn(self.ln_1(fx))
        fx = fx + self.mlp(self.ln_2(fx))
        if self.last_layer:
            fx = self.mlp2(self.ln_3(fx))
        return fx
self.ref = ref\n","        self.unified_pos = unified_pos\n","        if self.unified_pos:\n","            self.preprocess = MLP(fun_dim + self.ref * self.ref * self.ref, n_hidden * 2, n_hidden, n_layers=0,\n","                                  res=False, act=act)\n","        else:\n","            self.preprocess = MLP(fun_dim + space_dim, n_hidden * 2, n_hidden, n_layers=0, res=False, act=act)\n","\n","        self.n_hidden = n_hidden\n","        self.space_dim = space_dim\n","\n","        self.blocks = nn.ModuleList([Transolver_block(num_heads=n_head, hidden_dim=n_hidden,\n","                                                      dropout=dropout,\n","                                                      act=act,\n","                                                      mlp_ratio=mlp_ratio,\n","                                                      out_dim=out_dim,\n","                                                      slice_num=slice_num,\n","                                                      last_layer=(_ == n_layers - 1))\n","                                     for _ in range(n_layers)])\n","        self.initialize_weights()\n","        self.placeholder = nn.Parameter((1 / (n_hidden)) * torch.rand(n_hidden, dtype=torch.float))\n","\n","    def initialize_weights(self):\n","        self.apply(self._init_weights)\n","\n","    def _init_weights(self, m):\n","        if isinstance(m, nn.Linear):\n","            trunc_normal_(m.weight, std=0.02)\n","            if isinstance(m, nn.Linear) and m.bias is not None:\n","                nn.init.constant_(m.bias, 0)\n","        elif isinstance(m, (nn.LayerNorm, nn.BatchNorm1d)):\n","            nn.init.constant_(m.bias, 0)\n","            nn.init.constant_(m.weight, 1.0)\n","\n","    def get_grid(self, my_pos):\n","        # my_pos 1 N 3\n","        batchsize = my_pos.shape[0]\n","\n","        gridx = torch.tensor(np.linspace(-1.5, 1.5, self.ref), dtype=torch.float)\n","        gridx = gridx.reshape(1, self.ref, 1, 
1, 1).repeat([batchsize, 1, self.ref, self.ref, 1])\n","        gridy = torch.tensor(np.linspace(0, 2, self.ref), dtype=torch.float)\n","        gridy = gridy.reshape(1, 1, self.ref, 1, 1).repeat([batchsize, self.ref, 1, self.ref, 1])\n","        gridz = torch.tensor(np.linspace(-4, 4, self.ref), dtype=torch.float)\n","        gridz = gridz.reshape(1, 1, 1, self.ref, 1).repeat([batchsize, self.ref, self.ref, 1, 1])\n","        grid_ref = torch.cat((gridx, gridy, gridz), dim=-1).cuda().reshape(batchsize, self.ref ** 3, 3)  # B 4 4 4 3\n","\n","        pos = torch.sqrt(\n","            torch.sum((my_pos[:, :, None, :] - grid_ref[:, None, :, :]) ** 2,\n","                      dim=-1)). \\\n","            reshape(batchsize, my_pos.shape[1], self.ref * self.ref * self.ref).contiguous()\n","        return pos\n","\n","    def forward(self, data):\n","        cfd_data, geom_data = data\n","\n","        x, fx, T = cfd_data.x, None, None\n","        x = x[None, :, :]\n","        if self.unified_pos:\n","            new_pos = self.get_grid(cfd_data.pos[None, :, :])\n","            x = torch.cat((x, new_pos), dim=-1)\n","\n","        if fx is not None:\n","            fx = torch.cat((x, fx), -1)\n","            fx = self.preprocess(fx)\n","\n","        else:\n","            fx = self.preprocess(x)\n","            fx = fx + self.placeholder[None, None, :]\n","\n","\n","        for block in self.blocks:\n","            fx = block(fx)\n","\n","\n","        return fx[0]\n"]},{"cell_type":"markdown","metadata":{},"source":["训练代码"]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":["import numpy as np\n","import time, json, os\n","import torch\n","import torch.nn as nn\n","from torch_geometric.loader import DataLoader\n","from tqdm import tqdm\n","from torch.cuda.amp import autocast, GradScaler\n","\n","def get_nb_trainable_params(model):\n","    '''\n","    Return the number of trainable parameters\n","    '''\n","    model_parameters = filter(lambda 
def atrain(device, model, train_loader, optimizer, scheduler, reg=1):
    """Run one training epoch for the A-track model.

    The loss is the MSE of the last output channel (pressure) restricted to
    surface nodes, scaled by ``reg``. Returns the mean batch loss.
    """
    model.train()
    mse = nn.MSELoss(reduction='none')  # element-wise so we can mask to surface nodes
    epoch_losses = []
    for cfd_data, geom in train_loader:
        cfd_data, geom = cfd_data.to(device), geom.to(device)
        optimizer.zero_grad()

        prediction = model((cfd_data, geom))
        surface_loss = mse(prediction[cfd_data.surf, -1], cfd_data.y[cfd_data.surf, -1])
        loss = reg * surface_loss.mean()  # scalar objective

        loss.backward()
        optimizer.step()
        scheduler.step()  # OneCycleLR is stepped per batch

        epoch_losses.append(loss.item())

    return np.mean(epoch_losses)

@torch.no_grad()
def atest(device, model, test_loader):
    """Evaluate the A-track model; returns the mean surface-pressure MSE."""
    model.eval()
    mse = nn.MSELoss(reduction='none')  # element-wise, masked to surface nodes below
    epoch_losses = []
    for cfd_data, geom in test_loader:
        cfd_data, geom = cfd_data.to(device), geom.to(device)
        prediction = model((cfd_data, geom))
        surface_loss = mse(prediction[cfd_data.surf, -1], cfd_data.y[cfd_data.surf, -1])
        epoch_losses.append(surface_loss.mean().item())

    return np.mean(epoch_losses)

class NumpyEncoder(json.JSONEncoder):
    """JSON encoder that serializes numpy arrays as plain nested lists."""

    def default(self, obj):
        if isinstance(obj, np.ndarray):
            return obj.tolist()
        return super().default(obj)
def amain(device, train_dataset, val_dataset, Net, hparams, path, reg=1, val_iter=1, coef_norm=[]):
    """Train the A-track model with Adam + OneCycleLR, checkpoint the best
    validation loss, and append a JSON training log.

    Args:
        device: torch device to train on.
        train_dataset / val_dataset: datasets wrapped in a fresh DataLoader
            each epoch (shuffled for training).
        Net: the model instance to train (moved to ``device`` here).
        hparams: dict with 'lr', 'batch_size', 'nb_epochs'.
        path: unused; kept for interface compatibility.
        reg: loss scale passed to ``atrain``.
        val_iter: validate every ``val_iter`` epochs (None disables validation).
        coef_norm: normalization coefficients, recorded in the log.

    Returns the trained model.
    """
    model = Net.to(device)
    optimizer = torch.optim.Adam(model.parameters(), lr=hparams['lr'])
    lr_scheduler = torch.optim.lr_scheduler.OneCycleLR(
        optimizer,
        max_lr=hparams['lr'],
        total_steps=(len(train_dataset) // hparams['batch_size'] + 1) * hparams['nb_epochs'],
        final_div_factor=1000.,
    )
    start = time.time()

    train_loss, val_loss = 1e5, 1e5
    best_val_loss = float('inf')
    best_model_path = None

    pbar_train = tqdm(range(hparams['nb_epochs']), position=0)
    for epoch in pbar_train:
        train_loader = DataLoader(train_dataset, batch_size=hparams['batch_size'], shuffle=True, drop_last=True)
        train_loss = atrain(device, model, train_loader, optimizer, lr_scheduler, reg=reg)
        del train_loader

        if val_iter is not None and (epoch == hparams['nb_epochs'] - 1 or epoch % val_iter == 0):
            val_loader = DataLoader(val_dataset, batch_size=1)
            val_loss = atest(device, model, val_loader)
            del val_loader
            pbar_train.set_postfix(train_loss=train_loss, val_loss=val_loss)

            # Checkpoint whenever validation improves.
            if val_loss < best_val_loss:
                best_val_loss = val_loss
                best_model_path = f'best_amodel_epoch_{epoch}.pth'
                torch.save(model.state_dict(), best_model_path)
        else:
            pbar_train.set_postfix(train_loss=train_loss)

    time_elapsed = time.time() - start
    # FIX: float() instead of .astype('float') — the parameter count is a plain
    # Python int when the model has no parameters (sum of empty list), and int
    # has no .astype(). float() handles both int and numpy scalars.
    params_model = float(get_nb_trainable_params(model))
    print('Number of parameters:', params_model)
    print('Time elapsed: {0:.2f} seconds'.format(time_elapsed))
    torch.save(model.state_dict(), f'amodel_{hparams["nb_epochs"]}.pth')

    if val_iter is not None:
        # NOTE(review): mode 'a' appends, so repeated runs concatenate JSON
        # documents in one file; kept for backward compatibility.
        with open(f'alog_{hparams["nb_epochs"]}.json', 'a') as f:
            json.dump(
                {
                    'nb_parameters': params_model,
                    'time_elapsed': time_elapsed,
                    'hparams': hparams,
                    'train_loss': train_loss,
                    'val_loss': val_loss,
                    'best_val_loss': best_val_loss,
                    'best_model_path': best_model_path,
                    'coef_norm': list(coef_norm),
                }, f, indent=12, cls=NumpyEncoder
            )

    return model
open(os.path.join(f'alog_{hparams[\"nb_epochs\"]}.json'), 'a') as f:\n","            json.dump(\n","                {\n","                    'nb_parameters': params_model,\n","                    'time_elapsed': time_elapsed,\n","                    'hparams': hparams,\n","                    'train_loss': train_loss,\n","                    'val_loss': val_loss,\n","                    'best_val_loss': best_val_loss,\n","                    'best_model_path': best_model_path,\n","                    'coef_norm': list(coef_norm),\n","                }, f, indent=12, cls=NumpyEncoder\n","            )\n","\n","    return model\n","\n","\n","\n","def process_batches(cfd_data, geom, device, model, criterion_func, optimizer=None, scheduler=None, reg=1, scaler=None):\n","    losses = []\n","    cfd_data = cfd_data.to(device)\n","    geom = geom.to(device)\n","    n_samples = cfd_data.x.size(0)\n","    n_subsamples = max(1, int(1 * n_samples))  # 每次处理 10% 的数据\n","\n","    for i in range(0, n_samples, n_subsamples):\n","        end_index = min(i + n_subsamples, n_samples)\n","        sub_indices = torch.arange(i, end_index).long().to(device)\n","\n","        sub_cfd_data = cfd_data.clone()\n","        sub_cfd_data.x = sub_cfd_data.x[sub_indices]\n","        sub_cfd_data.y = sub_cfd_data.y[sub_indices]\n","\n","        if optimizer:\n","            optimizer.zero_grad()\n","        with autocast():  # Using AMP\n","            out = model((sub_cfd_data, geom))\n","            targets = sub_cfd_data.y\n","\n","            # Check for NaN in out and targets\n","            if torch.isnan(out).any():\n","                print(\"NaN detected in model output, skipping batch\")\n","                continue\n","\n","            loss = criterion_func(out[:, -1], targets[:, -1])\n","            loss_mean = loss.mean()\n","\n","\n","        if optimizer:\n","            scaler.scale(loss_mean).backward()\n","            scaler.step(optimizer)\n","            
def btrain(device, model, train_loader, optimizer, scheduler, scaler, reg=1):
    """Run one AMP training epoch for the B-track model; returns the mean loss."""
    model.train()
    mse = nn.MSELoss(reduction='none')  # element-wise loss, reduced downstream
    epoch_losses = []

    for cfd_data, geom in train_loader:
        batch_loss = process_batches(cfd_data, geom, device, model, mse,
                                     optimizer, scheduler, reg=reg, scaler=scaler)
        # process_batches returns None when every chunk was skipped (NaN).
        if batch_loss is not None:
            epoch_losses.append(batch_loss)

    return np.mean(epoch_losses)

@torch.no_grad()
def btest(device, model, test_loader):
    """Evaluate the B-track model without gradients; returns the mean loss."""
    model.eval()
    mse = nn.MSELoss(reduction='none')  # element-wise loss, reduced downstream
    epoch_losses = []

    for cfd_data, geom in test_loader:
        batch_loss = process_batches(cfd_data, geom, device, model, mse)
        if batch_loss is not None:
            epoch_losses.append(batch_loss)

    return np.mean(epoch_losses)
def bmain(device, train_dataset, val_dataset, model, hparams, path, reg=1, val_iter=1, coef_norm=[]):
    """Train the B-track model with AMP (GradScaler) + OneCycleLR, checkpoint
    the best validation loss, and append a JSON training log.

    Args:
        device: torch device to train on.
        train_dataset / val_dataset: datasets wrapped in a fresh DataLoader
            each epoch (shuffled for training).
        model: the model instance to train (moved to ``device`` here).
        hparams: dict with 'lr', 'batch_size', 'nb_epochs' (and 'num_workers').
        path: unused; kept for interface compatibility.
        reg: loss scale forwarded to ``btrain``.
        val_iter: validate every ``val_iter`` epochs (None disables validation).
        coef_norm: normalization coefficients, recorded in the log.

    Returns the trained model.
    """
    model = model.to(device)
    optimizer = torch.optim.Adam(model.parameters(), lr=hparams['lr'])
    scaler = GradScaler()  # gradient scaler for mixed-precision training
    lr_scheduler = torch.optim.lr_scheduler.OneCycleLR(
        optimizer,
        max_lr=hparams['lr'],
        total_steps=int((len(train_dataset) // hparams['batch_size'] + 1) * hparams['nb_epochs']),
        final_div_factor=1000.,
    )

    start = time.time()
    train_loss, val_loss = 1e5, 1e5
    best_val_loss = float('inf')
    best_epoch = -1

    pbar_train = tqdm(range(hparams['nb_epochs']), position=0)
    for epoch in pbar_train:
        train_loader = DataLoader(train_dataset, batch_size=hparams['batch_size'], shuffle=True, drop_last=True)
        train_loss = btrain(device, model, train_loader, optimizer, lr_scheduler, scaler, reg=reg)
        del train_loader

        if val_iter is not None and (epoch == hparams['nb_epochs'] - 1 or epoch % val_iter == 0):
            val_loader = DataLoader(val_dataset, batch_size=1)
            val_loss = btest(device, model, val_loader)
            del val_loader
            pbar_train.set_postfix(train_loss=train_loss, val_loss=val_loss)

            # Checkpoint whenever validation improves.
            if val_loss < best_val_loss:
                best_val_loss = val_loss
                best_epoch = epoch
                torch.save(model.state_dict(), f'best_bmodel_epoch_{epoch}.pth')
        else:
            pbar_train.set_postfix(train_loss=train_loss)

    time_elapsed = time.time() - start
    # FIX: float() instead of .astype('float') — the count is a plain Python
    # int when the model has no parameters, and int has no .astype().
    params_model = float(get_nb_trainable_params(model))
    print('Number of parameters:', params_model)
    print('Time elapsed: {0:.2f} seconds'.format(time_elapsed))
    torch.save(model.state_dict(), f'bmodel_{hparams["nb_epochs"]}.pth')

    if val_iter is not None:
        # NOTE(review): mode 'a' appends, so repeated runs concatenate JSON
        # documents in one file; kept for backward compatibility.
        with open(f'blog_{hparams["nb_epochs"]}.json', 'a') as f:
            json.dump(
                {
                    'nb_parameters': params_model,
                    'time_elapsed': time_elapsed,
                    'hparams': hparams,
                    'train_loss': train_loss,
                    'val_loss': val_loss,
                    'best_val_loss': best_val_loss,
                    'best_epoch': best_epoch,
                    'coef_norm': list(coef_norm),
                }, f, indent=12, cls=NumpyEncoder
            )

    print(f"Best bmodel saved at epoch {best_epoch} with validation loss {best_val_loss:.4f}")
    return model
If you are running on a CPU-only machine, please use torch.load with map_location=torch.device('cpu') to map your storages to the CPU.","output_type":"error","traceback":["\u001b[0;31m---------------------------------------------------------------------------\u001b[0m","\u001b[0;31mRuntimeError\u001b[0m                              Traceback (most recent call last)","\u001b[0;32m<ipython-input-36-ed877c577538>\u001b[0m in \u001b[0;36m<cell line: 25>\u001b[0;34m()\u001b[0m\n\u001b[1;32m     49\u001b[0m   \u001b[0mval_ds\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mGraphDataset\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mval_data\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0muse_cfd_mesh\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcfd_mesh\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mr\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     50\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 51\u001b[0;31m   \u001b[0mmodel\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mload\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'model_200.pth'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mto\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdevice\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     52\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     53\u001b[0m   \u001b[0mtest_loader\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mDataLoader\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mval_ds\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbatch_size\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.10/dist-packages/torch/serialization.py\u001b[0m in \u001b[0;36mload\u001b[0;34m(f, map_location, pickle_module, weights_only, 
mmap, **pickle_load_args)\u001b[0m\n\u001b[1;32m   1023\u001b[0m                     \u001b[0;32mexcept\u001b[0m \u001b[0mRuntimeError\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1024\u001b[0m                         \u001b[0;32mraise\u001b[0m \u001b[0mpickle\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mUnpicklingError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mUNSAFE_MESSAGE\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0mstr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0me\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1025\u001b[0;31m                 return _load(opened_zipfile,\n\u001b[0m\u001b[1;32m   1026\u001b[0m                              \u001b[0mmap_location\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1027\u001b[0m                              \u001b[0mpickle_module\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.10/dist-packages/torch/serialization.py\u001b[0m in \u001b[0;36m_load\u001b[0;34m(zip_file, map_location, pickle_module, pickle_file, overall_storage, **pickle_load_args)\u001b[0m\n\u001b[1;32m   1444\u001b[0m     \u001b[0munpickler\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mUnpicklerWrapper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdata_file\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mpickle_load_args\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1445\u001b[0m     \u001b[0munpickler\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpersistent_load\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mpersistent_load\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1446\u001b[0;31m     \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0munpickler\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mload\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1447\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1448\u001b[0m     \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_utils\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_validate_loaded_sparse_tensors\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.10/dist-packages/torch/serialization.py\u001b[0m in \u001b[0;36mpersistent_load\u001b[0;34m(saved_id)\u001b[0m\n\u001b[1;32m   1414\u001b[0m         \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1415\u001b[0m             \u001b[0mnbytes\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mnumel\u001b[0m \u001b[0;34m*\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_utils\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_element_size\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdtype\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1416\u001b[0;31m             \u001b[0mtyped_storage\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mload_tensor\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdtype\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnbytes\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mkey\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_maybe_decode_ascii\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlocation\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1417\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1418\u001b[0m         \u001b[0;32mreturn\u001b[0m \u001b[0mtyped_storage\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.10/dist-packages/torch/serialization.py\u001b[0m in \u001b[0;36mload_tensor\u001b[0;34m(dtype, 
numel, key, location)\u001b[0m\n\u001b[1;32m   1388\u001b[0m         \u001b[0;31m# stop wrapping with TypedStorage\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1389\u001b[0m         typed_storage = torch.storage.TypedStorage(\n\u001b[0;32m-> 1390\u001b[0;31m             \u001b[0mwrap_storage\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mrestore_location\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mstorage\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlocation\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1391\u001b[0m             \u001b[0mdtype\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mdtype\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1392\u001b[0m             _internal=True)\n","\u001b[0;32m/usr/local/lib/python3.10/dist-packages/torch/serialization.py\u001b[0m in \u001b[0;36mdefault_restore_location\u001b[0;34m(storage, location)\u001b[0m\n\u001b[1;32m    388\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mdefault_restore_location\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mstorage\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlocation\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    389\u001b[0m     \u001b[0;32mfor\u001b[0m \u001b[0m_\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfn\u001b[0m \u001b[0;32min\u001b[0m \u001b[0m_package_registry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 390\u001b[0;31m         \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mfn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mstorage\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlocation\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    391\u001b[0m         \u001b[0;32mif\u001b[0m \u001b[0mresult\u001b[0m \u001b[0;32mis\u001b[0m 
\u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    392\u001b[0m             \u001b[0;32mreturn\u001b[0m \u001b[0mresult\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.10/dist-packages/torch/serialization.py\u001b[0m in \u001b[0;36m_cuda_deserialize\u001b[0;34m(obj, location)\u001b[0m\n\u001b[1;32m    263\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m_cuda_deserialize\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mobj\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlocation\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    264\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0mlocation\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstartswith\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'cuda'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 265\u001b[0;31m         \u001b[0mdevice\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mvalidate_cuda_device\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlocation\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    266\u001b[0m         \u001b[0;32mif\u001b[0m \u001b[0mgetattr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mobj\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m\"_torch_load_uninitialized\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mFalse\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    267\u001b[0m             \u001b[0;32mwith\u001b[0m 
\u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcuda\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdevice\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdevice\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.10/dist-packages/torch/serialization.py\u001b[0m in \u001b[0;36mvalidate_cuda_device\u001b[0;34m(location)\u001b[0m\n\u001b[1;32m    247\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    248\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcuda\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mis_available\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 249\u001b[0;31m         raise RuntimeError('Attempting to deserialize object on a CUDA '\n\u001b[0m\u001b[1;32m    250\u001b[0m                            \u001b[0;34m'device but torch.cuda.is_available() is False. '\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    251\u001b[0m                            \u001b[0;34m'If you are running on a CPU-only machine, '\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;31mRuntimeError\u001b[0m: Attempting to deserialize object on a CUDA device but torch.cuda.is_available() is False. 
If you are running on a CPU-only machine, please use torch.load with map_location=torch.device('cpu') to map your storages to the CPU."]}],"source":["import numpy as np\n","import torch\n","import random\n","import os\n","import time\n","import json\n","import argparse\n","from torch import nn\n","from tqdm import tqdm\n","from torch_geometric.loader import DataLoader\n","import re\n","\n","\n","# 设置固定种子\n","def set_seed(seed):\n","    np.random.seed(seed)\n","    torch.manual_seed(seed)\n","    random.seed(seed)\n","    if torch.cuda.is_available():\n","        torch.cuda.manual_seed(seed)\n","        torch.cuda.manual_seed_all(seed)\n","    torch.backends.cudnn.deterministic = True\n","    torch.backends.cudnn.benchmark = False\n","\n","\n","set_seed(0)  # 设置种子为0\n","\n","parser = argparse.ArgumentParser()\n","parser.add_argument('--save_dir', default='data_track_C_vtk_preprocessed_data')\n","parser.add_argument('--a_data_dir', default='data_vtk')\n","parser.add_argument('--b_data_dir', default='data_centroid_track_B_vtk')\n","parser.add_argument('--a_test_data_dir', default='track_A_vtk')\n","parser.add_argument('--b_test_data_dir', default='track_B_vtk')\n","parser.add_argument('--fold_id', default=30, type=int)\n","parser.add_argument('--gpu', default=0, type=int)\n","parser.add_argument('--val_iter', default=1, type=int)\n","parser.add_argument('--cfd_model', default='Transolver', type=str)\n","parser.add_argument('--cfd_mesh', action='store_true')\n","parser.add_argument('--r', default=0.2, type=float)\n","parser.add_argument('--alr', default=0.001, type=float)\n","parser.add_argument('--blr', default=0.000001, type=float)\n","parser.add_argument('--weight', default=0.5, type=float)\n","parser.add_argument('--batch_size', default=1, type=int)\n","parser.add_argument('--nb_epochs', default=200, type=int)\n","parser.add_argument('--num_workers', default=16, type=int)  # 添加num_workers参数\n","parser.add_argument('--preprocessed', default=0, 
type=int)\n","parser.add_argument('--train_split', default=450, type=int)  # 添加训练集划分参数\n","parser.add_argument('--val_split', default=500, type=int)    # 添加验证集划分参数\n","parser.add_argument(\"-f\", help=\"a dummy argument to fool ipython\", default=\"1\")\n","args = parser.parse_args()\n","print(args)\n","\n","ahparams = {'lr': args.alr, 'batch_size': args.batch_size, 'nb_epochs': args.nb_epochs}\n","bhparams = {'lr': args.blr, 'batch_size': args.batch_size, 'nb_epochs': args.nb_epochs, 'num_workers': args.num_workers}\n","\n","n_gpu = torch.cuda.device_count()\n","use_cuda = 0 <= args.gpu < n_gpu and torch.cuda.is_available()\n","device = torch.device(f'cuda:{args.gpu}' if use_cuda else 'cpu')\n","\n","A_train_data, A_val_data, A_coef_norm = ac_load_train_val_fold(args, preprocessed=False)\n","A_train_ds = GraphDataset(A_train_data, use_cfd_mesh=args.cfd_mesh, r=args.r)\n","A_val_ds = GraphDataset(A_val_data, use_cfd_mesh=args.cfd_mesh, r=args.r)\n","\n","\n","\n","# 创建模型实例并加载权重\n","amodel = Model(n_hidden=256, n_layers=40, space_dim=7,\n","            fun_dim=0,\n","            n_head=8,\n","            mlp_ratio=2, out_dim=4,\n","            slice_num=512,\n","            unified_pos=False).to(device)\n","\n","amodel = amain(device, A_train_ds, A_val_ds, amodel, ahparams, path=None, val_iter=args.val_iter, reg=args.weight,\n","                   coef_norm=A_coef_norm)\n","\n","B_train_data, B_val_data, B_coef_norm = bc_load_train_val_fold(args, preprocessed=False)\n","B_train_ds = GraphDataset(B_train_data, use_cfd_mesh=args.cfd_mesh, r=args.r)\n","B_val_ds = GraphDataset(B_val_data, use_cfd_mesh=args.cfd_mesh, r=args.r)\n","\n","\n","bmodel = Model(n_hidden=256, n_layers=8, space_dim=6,\n","                  fun_dim=0,\n","                  n_head=8,\n","                  act='gelu',\n","                  mlp_ratio=2, out_dim=4,\n","                  slice_num=32,\n","                  unified_pos=False).to(device)\n","\n","bmodel = bmain(device, B_train_ds, 
B_val_ds, bmodel, bhparams, path=None, val_iter=args.val_iter, reg=args.weight,\n","                   coef_norm=B_coef_norm)\n","\n","a_val_data, a_vallst = ac_load_train_val_fold_file(args, preprocessed=False, coef_norm=A_coef_norm)\n","a_val_ds = GraphDataset(a_val_data, use_cfd_mesh=args.cfd_mesh, r=args.r)\n","b_val_data, b_vallst = bc_load_train_val_fold_file(args, preprocessed=False, coef_norm=B_coef_norm)\n","b_val_ds = GraphDataset(b_val_data, use_cfd_mesh=args.cfd_mesh, r=args.r)\n","\n","amodel_dir = '.'\n","amodel_files = [f for f in os.listdir(amodel_dir) if f.startswith('best_amodel_epoch_') and f.endswith('.pth')]\n","amax_epoch = -1\n","amax_epoch_file = None\n","\n","# 遍历文件，找到最大的 epoch 编号\n","for file in amodel_files:\n","    # 提取 epoch 编号\n","    epoch = int(file.split('_')[-1].split('.')[0])\n","    if epoch > amax_epoch:\n","        amax_epoch = epoch\n","        amax_epoch_file = file\n","\n","\n","if amax_epoch_file is not None:\n","    amodel_path = os.path.join(amodel_dir, amax_epoch_file)\n","    amodel.load_state_dict(torch.load(amodel_path))\n","    amodel.to(device)\n","\n","\n","bmodel_dir = '.'\n","bmodel_files = [f for f in os.listdir(bmodel_dir) if f.startswith('best_bmodel_epoch_') and f.endswith('.pth')]\n","bmax_epoch = -1\n","bmax_epoch_file = None\n","\n","# 遍历文件，找到最大的 epoch 编号\n","for file in bmodel_files:\n","    # 提取 epoch 编号\n","    epoch = int(file.split('_')[-1].split('.')[0])\n","    if epoch > bmax_epoch:\n","        bmax_epoch = epoch\n","        bmax_epoch_file = file\n","\n","\n","if bmax_epoch_file is not None:\n","    bmodel_path = os.path.join(bmodel_dir, bmax_epoch_file)\n","    bmodel.load_state_dict(torch.load(bmodel_path))\n","    bmodel.to(device)\n","\n","\n","a_test_loader = DataLoader(a_val_ds, batch_size=1)\n","b_test_loader = DataLoader(b_val_ds, batch_size=1)\n","\n","if not os.path.exists('./results/' + args.cfd_model + '_C/'):\n","    os.makedirs('./results/' + args.cfd_model + '_C/')\n","\n","with 
torch.no_grad():\n","    amodel.eval()\n","    times_a = []\n","    index_a = 0\n","    for cfd_data, geom in a_test_loader:\n","        # 提取 vallst 中的文件名并获取后三位数字\n","        a_mesh_file = a_vallst[index_a]\n","        a_match = re.search(r'mesh_(\\d{3})\\.vtk', a_mesh_file)\n","        if a_match:\n","            a_mesh_index = a_match.group(1)\n","        else:\n","            raise ValueError(f\"Invalid mesh file format: {a_mesh_file}\")\n","\n","        # print(mesh_file)\n","        cfd_data = cfd_data.to(device)\n","        geom = geom.to(device)\n","        tic = time.time()\n","        out = amodel((cfd_data, geom))\n","        toc = time.time()\n","\n","        # 提取press数据\n","        press_output = out[cfd_data.surf, -1]\n","\n","        # 反归一化\n","        if A_coef_norm is not None:\n","            mean_out = torch.tensor(A_coef_norm[2]).to(device)\n","            std_out = torch.tensor(A_coef_norm[3]).to(device)\n","            press_output = press_output * std_out[-1] + mean_out[-1]\n","\n","        # 转换为numpy数组并保存\n","        press_output = press_output.detach().cpu().numpy()\n","        np.save('./results/' + args.cfd_model + '_C/' + 'press' + '_' + f'{a_mesh_index}.npy', press_output)\n","\n","        times_a.append(toc - tic)\n","        index_a += 1\n","\n","    print('time:', np.mean(times_a))\n","\n","with torch.no_grad():\n","    bmodel.eval()\n","    times_b = []\n","    index_b = 0\n","    for cfd_data, geom in b_test_loader:\n","        # 提取 vallst 中的文件名并获取数字\n","        b_mesh_file = b_vallst[index_b]\n","        b_match = re.search(r'mesh_(\\d+)\\.vtk', b_mesh_file)  # 修改正则表达式\n","        if b_match:\n","            b_mesh_index = b_match.group(1)\n","            print(f\"Processing mesh index: {b_mesh_index}\")\n","        else:\n","            raise ValueError(f\"Invalid mesh file format: {b_mesh_file}\")\n","        # print(mesh_file)\n","        cfd_data = cfd_data.to(device)\n","        geom = geom.to(device)\n","        tic = 
time.time()\n","        out = bmodel((cfd_data, geom))\n","        toc = time.time()\n","\n","        # Extract the pressure channel (last output channel) on surface nodes\n","        press_output = out[cfd_data.surf, -1]\n","\n","        # Denormalize with the track-B statistics. Bug fix: the b-model data was\n","        # normalized with B_coef_norm (see bc_load_train_val_fold_file above), so its\n","        # predictions must be denormalized with B_coef_norm, not A_coef_norm.\n","        if B_coef_norm is not None:\n","            mean_out = torch.tensor(B_coef_norm[2]).to(device)\n","            std_out = torch.tensor(B_coef_norm[3]).to(device)\n","            press_output = press_output * std_out[-1] + mean_out[-1]\n","\n","        # Convert to a numpy array and save\n","        press_output = press_output.detach().cpu().numpy()\n","        np.save('./results/' + args.cfd_model + '_C/' + 'press' + '_' + f'{b_mesh_index}.npy', press_output)\n","\n","        times_b.append(toc - tic)\n","        index_b += 1\n","\n","    print('time:', np.mean(times_b))"]},{"cell_type":"markdown","metadata":{"id":"MBCalHN7bBji"},"source":["# 以C榜为例的输出规范\n","(将答案文件夹压缩为B_result.zip,用于被程序识别，假设文件路径是Example_C)"]},{"cell_type":"code","execution_count":null,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"_R4ymI9BcKYb","outputId":"d11335d3-d016-48bc-ce76-daa035fe8503"},"outputs":[{"name":"stdout","output_type":"stream","text":["  adding: Example_C/ (stored 0%)\n","  adding: Example_C/press_702.npy (deflated 5%)\n","  adding: Example_C/press_709.npy (deflated 5%)\n","  adding: Example_C/press_712.npy (deflated 5%)\n","  adding: Example_C/press_13.npy (deflated 5%)\n","  adding: Example_C/press_718.npy (deflated 5%)\n","  adding: Example_C/press_42.npy (deflated 5%)\n","  adding: Example_C/press_35.npy (deflated 5%)\n","  adding: Example_C/press_5.npy (deflated 5%)\n","  adding: Example_C/press_21.npy (deflated 5%)\n","  adding: Example_C/press_692.npy (deflated 5%)\n","  adding: Example_C/press_17.npy (deflated 5%)\n","  adding: Example_C/press_711.npy (deflated 5%)\n","  adding: Example_C/press_686.npy (deflated 5%)\n","  adding: Example_C/press_666.npy (deflated 5%)\n","  adding: Example_C/press_12.npy (deflated 5%)\n","  adding: Example_C/press_665.npy (deflated 5%)\n","  
adding: Example_C/press_27.npy (deflated 5%)\n","  adding: Example_C/press_39.npy (deflated 5%)\n","  adding: Example_C/press_672.npy (deflated 5%)\n","  adding: Example_C/press_32.npy (deflated 5%)\n","  adding: Example_C/press_19.npy (deflated 5%)\n","  adding: Example_C/press_31.npy (deflated 5%)\n","  adding: Example_C/press_11.npy (deflated 5%)\n","  adding: Example_C/press_660.npy (deflated 5%)\n","  adding: Example_C/press_667.npy (deflated 5%)\n","  adding: Example_C/press_677.npy (deflated 5%)\n","  adding: Example_C/press_40.npy (deflated 5%)\n","  adding: Example_C/press_663.npy (deflated 5%)\n","  adding: Example_C/press_659.npy (deflated 5%)\n","  adding: Example_C/press_684.npy (deflated 5%)\n","  adding: Example_C/press_23.npy (deflated 5%)\n","  adding: Example_C/press_41.npy (deflated 5%)\n","  adding: Example_C/press_675.npy (deflated 5%)\n","  adding: Example_C/press_22.npy (deflated 5%)\n","  adding: Example_C/press_721.npy (deflated 5%)\n","  adding: Example_C/press_705.npy (deflated 5%)\n","  adding: Example_C/press_9.npy (deflated 5%)\n","  adding: Example_C/press_28.npy (deflated 5%)\n","  adding: Example_C/press_701.npy (deflated 5%)\n","  adding: Example_C/press_25.npy (deflated 5%)\n","  adding: Example_C/press_719.npy (deflated 5%)\n","  adding: Example_C/press_697.npy (deflated 5%)\n","  adding: Example_C/press_47.npy (deflated 5%)\n","  adding: Example_C/press_695.npy (deflated 5%)\n","  adding: Example_C/press_4.npy (deflated 5%)\n","  adding: Example_C/press_37.npy (deflated 5%)\n","  adding: Example_C/press_16.npy (deflated 5%)\n","  adding: Example_C/press_30.npy (deflated 5%)\n","  adding: Example_C/press_18.npy (deflated 5%)\n","  adding: Example_C/press_15.npy (deflated 5%)\n","  adding: Example_C/press_676.npy (deflated 5%)\n","  adding: Example_C/press_681.npy (deflated 5%)\n","  adding: Example_C/press_49.npy (deflated 5%)\n","  adding: Example_C/press_36.npy (deflated 5%)\n","  adding: Example_C/press_45.npy (deflated 
5%)\n","  adding: Example_C/press_688.npy (deflated 5%)\n","  adding: Example_C/press_44.npy (deflated 5%)\n","  adding: Example_C/press_710.npy (deflated 5%)\n","  adding: Example_C/press_20.npy (deflated 5%)\n","  adding: Example_C/press_689.npy (deflated 5%)\n","  adding: Example_C/press_696.npy (deflated 5%)\n","  adding: Example_C/press_683.npy (deflated 5%)\n","  adding: Example_C/press_708.npy (deflated 5%)\n","  adding: Example_C/press_673.npy (deflated 5%)\n","  adding: Example_C/press_1.npy (deflated 5%)\n","  adding: Example_C/press_668.npy (deflated 5%)\n","  adding: Example_C/press_34.npy (deflated 5%)\n","  adding: Example_C/press_48.npy (deflated 5%)\n","  adding: Example_C/press_700.npy (deflated 5%)\n","  adding: Example_C/press_662.npy (deflated 5%)\n","  adding: Example_C/press_722.npy (deflated 5%)\n","  adding: Example_C/press_10.npy (deflated 5%)\n","  adding: Example_C/press_690.npy (deflated 5%)\n","  adding: Example_C/press_43.npy (deflated 5%)\n","  adding: Example_C/press_6.npy (deflated 5%)\n","  adding: Example_C/press_674.npy (deflated 5%)\n","  adding: Example_C/press_717.npy (deflated 5%)\n","  adding: Example_C/press_24.npy (deflated 5%)\n","  adding: Example_C/press_687.npy (deflated 5%)\n","  adding: Example_C/press_8.npy (deflated 5%)\n","  adding: Example_C/press_664.npy (deflated 5%)\n","  adding: Example_C/press_50.npy (deflated 5%)\n","  adding: Example_C/press_26.npy (deflated 5%)\n","  adding: Example_C/press_715.npy (deflated 5%)\n","  adding: Example_C/press_38.npy (deflated 5%)\n","  adding: Example_C/press_713.npy (deflated 5%)\n","  adding: Example_C/press_7.npy (deflated 5%)\n","  adding: Example_C/press_14.npy (deflated 5%)\n","  adding: Example_C/press_703.npy (deflated 5%)\n","  adding: Example_C/press_693.npy (deflated 5%)\n","  adding: Example_C/press_2.npy (deflated 5%)\n","  adding: Example_C/press_691.npy (deflated 5%)\n","  adding: Example_C/press_3.npy (deflated 5%)\n","  adding: Example_C/press_33.npy 
(deflated 5%)\n","  adding: Example_C/press_46.npy (deflated 5%)\n","  adding: Example_C/press_679.npy (deflated 5%)\n","  adding: Example_C/press_704.npy (deflated 5%)\n","  adding: Example_C/press_678.npy (deflated 5%)\n","  adding: Example_C/press_29.npy (deflated 5%)\n","  adding: Example_C/press_658.npy (deflated 5%)\n"]}],"source":["!zip -r B_result.zip ./results/Transolver_C"]}],"metadata":{"colab":{"provenance":[]},"kernelspec":{"display_name":"Python 3","name":"python3"},"language_info":{"name":"python"}},"nbformat":4,"nbformat_minor":0}
