{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.4363939762115479,
"min": 1.3503320217132568,
"max": 3.2957546710968018,
"count": 5000
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 28681.916015625,
"min": 23196.216796875,
"max": 124502.9140625,
"count": 5000
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 64.98684210526316,
"min": 38.22047244094488,
"max": 999.0,
"count": 5000
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19756.0,
"min": 15244.0,
"max": 26476.0,
"count": 5000
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1648.8015793099883,
"min": 1196.0437918589532,
"max": 1686.2468181253266,
"count": 4999
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 250617.8400551182,
"min": 2394.311373924607,
"max": 389700.5828417282,
"count": 4999
},
"SoccerTwos.Step.mean": {
"value": 49999976.0,
"min": 9254.0,
"max": 49999976.0,
"count": 5000
},
"SoccerTwos.Step.sum": {
"value": 49999976.0,
"min": 9254.0,
"max": 49999976.0,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": -0.01695444993674755,
"min": -0.132838636636734,
"max": 0.16042351722717285,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": -2.560122013092041,
"min": -24.30330467224121,
"max": 31.2825870513916,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": -0.019264034926891327,
"min": -0.13162541389465332,
"max": 0.15800507366657257,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": -2.9088692665100098,
"min": -24.16169548034668,
"max": 30.810989379882812,
"count": 5000
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 5000
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": -0.015650334342426023,
"min": -0.5294117647058824,
"max": 0.46835555964046055,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": -2.3632004857063293,
"min": -66.39160001277924,
"max": 52.063599705696106,
"count": 5000
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": -0.015650334342426023,
"min": -0.5294117647058824,
"max": 0.46835555964046055,
"count": 5000
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": -2.3632004857063293,
"min": -66.39160001277924,
"max": 52.063599705696106,
"count": 5000
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 5000
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 5000
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.020285495067946614,
"min": 0.009509219172468875,
"max": 0.02645891107774029,
"count": 2426
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.020285495067946614,
"min": 0.009509219172468875,
"max": 0.02645891107774029,
"count": 2426
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.10916803305347761,
"min": 0.0011797005446472517,
"max": 0.12795470307270687,
"count": 2426
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.10916803305347761,
"min": 0.0011797005446472517,
"max": 0.12795470307270687,
"count": 2426
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.11020258565743764,
"min": 0.001188150733166064,
"max": 0.13052797242999076,
"count": 2426
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.11020258565743764,
"min": 0.001188150733166064,
"max": 0.13052797242999076,
"count": 2426
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 2426
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 2426
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 2426
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 2426
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 2426
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 2426
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1676241851",
"python_version": "3.9.16 (main, Jan 11 2023, 16:05:54) \n[GCC 11.2.0]",
"command_line_arguments": "/home/konrad/miniconda3/envs/rl_ai_vs_ai/bin/mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos/SoccerTwos.x86_64 --run-id=SoccerTwos --no-graphics --force",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.11.0+cu102",
"numpy_version": "1.21.2",
"end_time_seconds": "1676307613"
},
"total": 65761.664129426,
"count": 1,
"self": 0.2691828070237534,
"children": {
"run_training.setup": {
"total": 0.00772888699430041,
"count": 1,
"self": 0.00772888699430041
},
"TrainerController.start_learning": {
"total": 65761.38721773199,
"count": 1,
"self": 68.87260482228885,
"children": {
"TrainerController._reset_env": {
"total": 6.108942669961834,
"count": 250,
"self": 6.108942669961834
},
"TrainerController.advance": {
"total": 65686.22484355074,
"count": 3457920,
"self": 65.07742408150807,
"children": {
"env_step": {
"total": 46744.5257107698,
"count": 3457920,
"self": 34186.372190445996,
"children": {
"SubprocessEnvManager._take_step": {
"total": 12520.238155457206,
"count": 3457920,
"self": 333.48648220101313,
"children": {
"TorchPolicy.evaluate": {
"total": 12186.751673256193,
"count": 6277096,
"self": 12186.751673256193
}
}
},
"workers": {
"total": 37.915364866596065,
"count": 3457920,
"self": 0.0,
"children": {
"worker_root": {
"total": 65677.59810379175,
"count": 3457920,
"is_parallel": true,
"self": 38147.21769326538,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0019695220107678324,
"count": 2,
"is_parallel": true,
"self": 0.0005542150029214099,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0014153070078464225,
"count": 8,
"is_parallel": true,
"self": 0.0014153070078464225
}
}
},
"UnityEnvironment.step": {
"total": 0.020286636005039327,
"count": 1,
"is_parallel": true,
"self": 0.0005079609982203692,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0003870410000672564,
"count": 1,
"is_parallel": true,
"self": 0.0003870410000672564
},
"communicator.exchange": {
"total": 0.017914305004524067,
"count": 1,
"is_parallel": true,
"self": 0.017914305004524067
},
"steps_from_proto": {
"total": 0.0014773290022276342,
"count": 2,
"is_parallel": true,
"self": 0.0003302739787613973,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0011470550234662369,
"count": 8,
"is_parallel": true,
"self": 0.0011470550234662369
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 27529.993976432233,
"count": 3457919,
"is_parallel": true,
"self": 1644.5499559181917,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 1065.2899068473198,
"count": 3457919,
"is_parallel": true,
"self": 1065.2899068473198
},
"communicator.exchange": {
"total": 20114.62883969328,
"count": 3457919,
"is_parallel": true,
"self": 20114.62883969328
},
"steps_from_proto": {
"total": 4705.525273973442,
"count": 6915838,
"is_parallel": true,
"self": 1022.7446160347754,
"children": {
"_process_rank_one_or_two_observation": {
"total": 3682.780657938667,
"count": 27663352,
"is_parallel": true,
"self": 3682.780657938667
}
}
}
}
},
"steps_from_proto": {
"total": 0.386434094136348,
"count": 498,
"is_parallel": true,
"self": 0.08291502288193442,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.3035190712544136,
"count": 1992,
"is_parallel": true,
"self": 0.3035190712544136
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 18876.62170869943,
"count": 3457920,
"self": 409.5243336641288,
"children": {
"process_trajectory": {
"total": 7357.342531291666,
"count": 3457920,
"self": 7338.3734092125815,
"children": {
"RLTrainer._checkpoint": {
"total": 18.969122079084627,
"count": 100,
"self": 18.969122079084627
}
}
},
"_update_policy": {
"total": 11109.754843743634,
"count": 2426,
"self": 6401.967426916919,
"children": {
"TorchPOCAOptimizer.update": {
"total": 4707.787416826715,
"count": 72780,
"self": 4707.787416826715
}
}
}
}
}
}
},
"trainer_threads": {
"total": 8.360075298696756e-07,
"count": 1,
"self": 8.360075298696756e-07
},
"TrainerController._save_models": {
"total": 0.1808258529927116,
"count": 1,
"self": 0.0011467889999039471,
"children": {
"RLTrainer._checkpoint": {
"total": 0.17967906399280764,
"count": 1,
"self": 0.17967906399280764
}
}
}
}
}
}