{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.9081424474716187,
"min": 1.8569482564926147,
"max": 3.2957239151000977,
"count": 1696
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 37857.546875,
"min": 12016.7900390625,
"max": 162516.515625,
"count": 1696
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 102.60416666666667,
"min": 71.71014492753623,
"max": 999.0,
"count": 1696
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19700.0,
"min": 16304.0,
"max": 23620.0,
"count": 1696
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1480.762717569772,
"min": 1198.49454273255,
"max": 1513.5470795141598,
"count": 1052
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 142153.2208866981,
"min": 2398.4672776531243,
"max": 206908.54901103445,
"count": 1052
},
"SoccerTwos.Step.mean": {
"value": 16959950.0,
"min": 9666.0,
"max": 16959950.0,
"count": 1696
},
"SoccerTwos.Step.sum": {
"value": 16959950.0,
"min": 9666.0,
"max": 16959950.0,
"count": 1696
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": -0.042484100908041,
"min": -0.1209673285484314,
"max": 0.08432997763156891,
"count": 1696
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": -4.0784735679626465,
"min": -14.758013725280762,
"max": 6.972206115722656,
"count": 1696
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": -0.04343920946121216,
"min": -0.12238497287034988,
"max": 0.08571578562259674,
"count": 1696
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": -4.170164108276367,
"min": -14.9309663772583,
"max": 7.411514759063721,
"count": 1696
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 1696
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 1696
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": -0.07420416610936324,
"min": -0.6232181841676886,
"max": 0.43226086575052014,
"count": 1696
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": -7.123599946498871,
"min": -45.98939996957779,
"max": 33.95600014925003,
"count": 1696
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": -0.07420416610936324,
"min": -0.6232181841676886,
"max": 0.43226086575052014,
"count": 1696
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": -7.123599946498871,
"min": -45.98939996957779,
"max": 33.95600014925003,
"count": 1696
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 1696
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 1696
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.015667513432951332,
"min": 0.010533121827271922,
"max": 0.02685099102091044,
"count": 798
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.015667513432951332,
"min": 0.010533121827271922,
"max": 0.02685099102091044,
"count": 798
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.07109390248854955,
"min": 3.922223305400744e-10,
"max": 0.07932198345661164,
"count": 798
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.07109390248854955,
"min": 3.922223305400744e-10,
"max": 0.07932198345661164,
"count": 798
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.07201107839743297,
"min": 5.209304672811517e-10,
"max": 0.0802318458755811,
"count": 798
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.07201107839743297,
"min": 5.209304672811517e-10,
"max": 0.0802318458755811,
"count": 798
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 798
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 798
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 798
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 798
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 798
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 798
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1706074009",
"python_version": "3.10.12 | packaged by Anaconda, Inc. | (main, Jul 5 2023, 19:01:18) [MSC v.1916 64 bit (AMD64)]",
"command_line_arguments": "\\\\?\\C:\\ProgramData\\miniconda3\\envs\\rl\\Scripts\\mlagents-learn .\\config\\poca\\SoccerTwos.yaml --env=.\\SoccerTwos\\SoccerTwos\\SoccerTwos.exe --run-id=SoccerTwos --no-graphics",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.1.2+cpu",
"numpy_version": "1.23.5",
"end_time_seconds": "1706109931"
},
"total": 35920.1144813,
"count": 1,
"self": 1.6802807000203757,
"children": {
"run_training.setup": {
"total": 0.07653599999321159,
"count": 1,
"self": 0.07653599999321159
},
"TrainerController.start_learning": {
"total": 35918.357664599986,
"count": 1,
"self": 13.76135550509207,
"children": {
"TrainerController._reset_env": {
"total": 6.697737099922961,
"count": 85,
"self": 6.697737099922961
},
"TrainerController.advance": {
"total": 35897.799485494965,
"count": 1116096,
"self": 14.068040496305912,
"children": {
"env_step": {
"total": 12341.891317795002,
"count": 1116096,
"self": 9352.766301088806,
"children": {
"SubprocessEnvManager._take_step": {
"total": 2980.022104709584,
"count": 1116096,
"self": 97.6170436139655,
"children": {
"TorchPolicy.evaluate": {
"total": 2882.4050610956183,
"count": 2165450,
"self": 2882.4050610956183
}
}
},
"workers": {
"total": 9.10291199661151,
"count": 1116096,
"self": 0.0,
"children": {
"worker_root": {
"total": 35875.48386889631,
"count": 1116096,
"is_parallel": true,
"self": 28539.939800499997,
"children": {
"steps_from_proto": {
"total": 0.10554500001308043,
"count": 170,
"is_parallel": true,
"self": 0.02169160004996229,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.08385339996311814,
"count": 680,
"is_parallel": true,
"self": 0.08385339996311814
}
}
},
"UnityEnvironment.step": {
"total": 7335.438523396297,
"count": 1116096,
"is_parallel": true,
"self": 403.66893040476134,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 363.4501111073623,
"count": 1116096,
"is_parallel": true,
"self": 363.4501111073623
},
"communicator.exchange": {
"total": 5315.479628394358,
"count": 1116096,
"is_parallel": true,
"self": 5315.479628394358
},
"steps_from_proto": {
"total": 1252.8398534898151,
"count": 2232192,
"is_parallel": true,
"self": 251.06104848264658,
"children": {
"_process_rank_one_or_two_observation": {
"total": 1001.7788050071686,
"count": 8928768,
"is_parallel": true,
"self": 1001.7788050071686
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 23541.840127203657,
"count": 1116096,
"self": 137.17520250451344,
"children": {
"process_trajectory": {
"total": 3179.262487899163,
"count": 1116096,
"self": 3176.297287999201,
"children": {
"RLTrainer._checkpoint": {
"total": 2.96519989996159,
"count": 33,
"self": 2.96519989996159
}
}
},
"_update_policy": {
"total": 20225.40243679998,
"count": 799,
"self": 1879.7964162002609,
"children": {
"TorchPOCAOptimizer.update": {
"total": 18345.60602059972,
"count": 23963,
"self": 18345.60602059972
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.00000761449337e-06,
"count": 1,
"self": 1.00000761449337e-06
},
"TrainerController._save_models": {
"total": 0.09908549999818206,
"count": 1,
"self": 0.00568639999255538,
"children": {
"RLTrainer._checkpoint": {
"total": 0.09339910000562668,
"count": 1,
"self": 0.09339910000562668
}
}
}
}
}
}
}