{
"render_datetime": "2024-06-20T14:13:53",
"render_version": "0.0.16.dev63+g7840983.d20240612",
"render_dataset_metadata": {
"name": "tanksandtemples",
"color_space": "srgb",
"evaluation_protocol": "default",
"viewer_transform": [
[
0.215291,
0.005863,
-0.006118,
0.01223
],
[
0.005863,
0.009227,
0.21518,
-0.000686
],
[
0.006118,
-0.21518,
0.00906,
0.001321
]
],
"viewer_initial_pose": [
[
-0.490935,
-0.023193,
-0.870887,
0.704811
],
[
0.870957,
-0.036515,
-0.490001,
0.166829
],
[
-0.020436,
-0.999064,
0.038127,
-0.001734
]
],
"scene": "temple",
"downscale_factor": 2,
"expected_scene_scale": 4.451097,
"type": null
},
"nb_info": {
"method": "gaussian-splatting",
"num_iterations": 30000,
"total_train_time": 668.99082,
"resources_utilization": {
"memory": 4654,
"gpu_memory": 6374,
"gpu_name": "NVIDIA A100-SXM4-40GB"
},
"datetime": "2024-06-04T17:09:53+0200",
"config_overrides": {},
"dataset_metadata": {
"name": "tanksandtemples",
"color_space": "srgb",
"evaluation_protocol": "default",
"viewer_transform": [
[
0.215291,
0.005863,
-0.006118,
0.01223
],
[
0.005863,
0.009227,
0.21518,
-0.000686
],
[
0.006118,
-0.21518,
0.00906,
0.001321
]
],
"viewer_initial_pose": [
[
-0.490935,
-0.023193,
-0.870887,
0.704811
],
[
0.870957,
-0.036515,
-0.490001,
0.166829
],
[
-0.020436,
-0.999064,
0.038127,
-0.001734
]
],
"scene": "temple",
"downscale_factor": 2,
"expected_scene_scale": 4.451097,
"type": null
},
"evaluation_protocol": "default",
"nb_version": "0.0.16.dev56+g8ee0779.d20240531",
"hparams": {
"sh_degree": 3,
"white_background": false,
"scale_coords": null,
"iterations": 30000,
"position_lr_init": 0.00016,
"position_lr_final": 1.6e-06,
"position_lr_delay_mult": 0.01,
"position_lr_max_steps": 30000,
"feature_lr": 0.0025,
"opacity_lr": 0.05,
"scaling_lr": 0.005,
"rotation_lr": 0.001,
"percent_dense": 0.01,
"lambda_dssim": 0.2,
"densification_interval": 100,
"opacity_reset_interval": 3000,
"densify_from_iter": 500,
"densify_until_iter": 15000,
"densify_grad_threshold": 0.0002,
"random_background": false,
"convert_SHs_python": false,
"compute_cov3D_python": false,
"debug": false
},
"checkpoint_sha256": "7f1458728af7978d0975f1723f386ba90f18a83ea92bd35622e8264f5e14ecb3"
},
"evaluate_datetime": "2024-06-20T14:14:26",
"evaluate_version": "0.0.16.dev63+g7840983.d20240612",
"metrics": {
"psnr": 27.51376,
"ssim": 0.85235,
"mae": 0.0246,
"mse": 0.00193,
"lpips": 0.16024
},
"metrics_raw": {
"psnr": "DHLQQVI31kEMCslBoD+/Qdxo3EHEXeJB9p/rQTL81UHsFNxBOAfZQc002kEMO9NB387iQXXC0kHHs89BSCTUQTtS2kE/2dxBMcrvQcao7kF4AdFB67LPQR6jx0EEDd1BEd3VQeQ52UGCxM9ByqDMQZ5EzUHjUcpBoxniQWDnwkEUUtVBrtL1QdHL3kFIGvdBXCXeQeqk70G2p+hBBxnfQeTS3kFiFtBBhqDkQRv29kFAY99B3gzkQZhczUE3l9BBIg/eQXBM50E+N/5Bq6UAQg==",
"ssim": "PBVaP3LcYD8LEE4/fT5QP4I+Xz8sMWg/SRJvP70rXT9Xqkw/CyBLP527Tz9GQjQ/mvJDP9YgUT8PDkw/h3hNP6ByXz8WxV8/5vhcP417VT+MkkY/ma1UPxFaUz/7LmE/2vNcP0UiUD8Mr2Q/SqZhP7P4Wz9Q+Fg/G4NqP9nPUT+pmVY/ERBpP1lTWD+88mE/6KBHP1U2bz9IPWw/DzlnPzKvWz+WBVY/wDRmP2QqZz/z4mo/vg9uPxYWTj+FvFE/P8VSP7idWT8mnWI/avBpPw==",
"mae": "grfmPJArzTxr+wI9JtYBPYPGzTxuma08O7WKPH9//DzWDtU874fJPDA91Tzjl+w8mATVPJ7F1zyt5NU8RWTIPHJkwTz53Kc8CpeSPFSvqDwcedk8te3KPF1I8zxhzLI8XgXYPCUi4zwu5As9qhMiPcKz1jyK3Ac9eMykPIvQAT2HCd08bPODPPAR1jzmGpM8/T/ZPAk9jDyFKI88xUy3PDTT1jzEquA8KIavPGyFfTyOL4g8V7SsPAL1+jyPRQE96Qe8PB2orTyfu388Yy9xPA==",
"mse": "i4UiO5ymCTvHIkk71U2FO9NZ5jrbDsI6a6mUOqGRCjv0iOg6oOX9OrBu9Tp6ABY7G5u/OqgMGDsrCSY7nR4SOx6f9DpedeM6Cd6DOpg6iDoK7B87Rg0mOyhrUTtyI+I6AA4LO190/DpJuSU7f2U1O/kVMjtj20E714zDOuT7bzumPg07ibFdOigP1zorrVU6Mx/bOtFrhDp57aE6SDPVOmDj1jqONCQ7I9W1OhuMVjq0bdM6W+C4OkWbMTsL2CE7javbOgFgqDoZHi46fFofOg==",
"lpips": "xDIgPttv3T3B/ko+Xu5CPgbnHj6dm9A9fNGoPRLZIT5/+FU+NM2BPq7tTD50Bbw+UD+vPoz4ej6CKnI+iWuBPn1JBj5Uowo+eaMdPvwOMj4mmoM+skY7Phi0Jj7NFgE+gKoAPmFZiD50rq09BvK7PeDQBD7zqPs967iiPePkKz46qQI+WyuZPZT3Gj7UKwU+EjuVPl6mnT3wjYA9YBvHPe0LHT4/kxs+grqnPXb78j3BqrY93M+xPSkdXj6t10w+oug/PgYTFD6o3Qk+OvTYPQ=="
},
"metrics_sha256": "573bc6a4907d8b4f405eff3597dcf3c46335d57b19ed8ff416c58bfe7ad8c319",
"predictions_sha256": "99d4437e0a7c80b61c401d073f6d608f1ce3086479b96eb9ab14faec59f497b0",
"ground_truth_sha256": "b596b39214a9a5155b525245f8be589489eab69bcce0a26ec0f5ab276e50829b",
"evaluation_protocol": "default"
}