{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 3.0266637802124023,
"min": 3.0074007511138916,
"max": 3.2957417964935303,
"count": 150
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 26537.7890625,
"min": 11414.236328125,
"max": 123887.0703125,
"count": 150
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 999.0,
"min": 366.0,
"max": 999.0,
"count": 150
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19980.0,
"min": 16216.0,
"max": 24340.0,
"count": 150
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1200.9095637151368,
"min": 1200.3979646050168,
"max": 1208.0456558895698,
"count": 77
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 2401.8191274302735,
"min": 2400.7959292100336,
"max": 26562.48308573179,
"count": 77
},
"SoccerTwos.Step.mean": {
"value": 1499052.0,
"min": 9188.0,
"max": 1499052.0,
"count": 150
},
"SoccerTwos.Step.sum": {
"value": 1499052.0,
"min": 9188.0,
"max": 1499052.0,
"count": 150
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": 0.0002922726853284985,
"min": -0.04077611491084099,
"max": 0.0026190406642854214,
"count": 150
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": 0.002922726795077324,
"min": -0.5919223427772522,
"max": 0.07595217972993851,
"count": 150
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.00032525171991437674,
"min": -0.040881361812353134,
"max": 0.0031273856293410063,
"count": 150
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": 0.0032525171991437674,
"min": -0.5923647880554199,
"max": 0.09069418162107468,
"count": 150
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 150
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 150
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": 0.0,
"min": -0.5051034474424247,
"max": 0.3945249989628792,
"count": 150
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": 0.0,
"min": -14.647999975830317,
"max": 6.312399983406067,
"count": 150
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": 0.0,
"min": -0.5051034474424247,
"max": 0.3945249989628792,
"count": 150
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": 0.0,
"min": -14.647999975830317,
"max": 6.312399983406067,
"count": 150
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 150
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 150
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.019270571356173605,
"min": 0.01109234259929508,
"max": 0.022038546949625017,
"count": 69
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.019270571356173605,
"min": 0.01109234259929508,
"max": 0.022038546949625017,
"count": 69
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 4.058379782160652e-07,
"min": 2.2539077188336402e-07,
"max": 0.00748855258959035,
"count": 69
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 4.058379782160652e-07,
"min": 2.2539077188336402e-07,
"max": 0.00748855258959035,
"count": 69
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 3.1599061856013577e-07,
"min": 1.9275289702136434e-07,
"max": 0.007870006328448653,
"count": 69
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 3.1599061856013577e-07,
"min": 1.9275289702136434e-07,
"max": 0.007870006328448653,
"count": 69
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 3.896998701333344e-07,
"min": 3.896998701333344e-07,
"max": 0.0002955628014790666,
"count": 69
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 3.896998701333344e-07,
"min": 3.896998701333344e-07,
"max": 0.0002955628014790666,
"count": 69
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.1001298666666667,
"min": 0.1001298666666667,
"max": 0.1985209333333334,
"count": 69
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.1001298666666667,
"min": 0.1001298666666667,
"max": 0.1985209333333334,
"count": 69
},
"SoccerTwos.Policy.Beta.mean": {
"value": 1.6480346666666688e-05,
"min": 1.6480346666666688e-05,
"max": 0.004926194573333333,
"count": 69
},
"SoccerTwos.Policy.Beta.sum": {
"value": 1.6480346666666688e-05,
"min": 1.6480346666666688e-05,
"max": 0.004926194573333333,
"count": 69
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1682416743",
"python_version": "3.9.16 | packaged by conda-forge | (main, Feb 1 2023, 21:39:03) \n[GCC 11.3.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos/SoccerTwos.x86_64 --run-id=SoccerTwos1 --no-graphics",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.11.0+cu102",
"numpy_version": "1.21.2",
"end_time_seconds": "1682437942"
},
"total": 21199.008036495,
"count": 1,
"self": 0.7882214160017611,
"children": {
"run_training.setup": {
"total": 0.028646155000160434,
"count": 1,
"self": 0.028646155000160434
},
"TrainerController.start_learning": {
"total": 21198.191168924,
"count": 1,
"self": 2.9933048658458574,
"children": {
"TrainerController._reset_env": {
"total": 5.077267550999977,
"count": 8,
"self": 5.077267550999977
},
"TrainerController.advance": {
"total": 21189.687087228154,
"count": 97775,
"self": 3.5816408247337677,
"children": {
"env_step": {
"total": 20508.776001725786,
"count": 97775,
"self": 19900.856174474404,
"children": {
"SubprocessEnvManager._take_step": {
"total": 605.9272347737397,
"count": 97775,
"self": 19.04855248140234,
"children": {
"TorchPolicy.evaluate": {
"total": 586.8786822923373,
"count": 194110,
"self": 586.8786822923373
}
}
},
"workers": {
"total": 1.992592477641665,
"count": 97775,
"self": 0.0,
"children": {
"worker_root": {
"total": 21159.56748542517,
"count": 97775,
"is_parallel": true,
"self": 1673.710507693202,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0034612699998888274,
"count": 2,
"is_parallel": true,
"self": 0.0007102629997461918,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0027510070001426357,
"count": 8,
"is_parallel": true,
"self": 0.0027510070001426357
}
}
},
"UnityEnvironment.step": {
"total": 0.1844313480000892,
"count": 1,
"is_parallel": true,
"self": 0.00035165300027983903,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.003827283999953579,
"count": 1,
"is_parallel": true,
"self": 0.003827283999953579
},
"communicator.exchange": {
"total": 0.17497757099999944,
"count": 1,
"is_parallel": true,
"self": 0.17497757099999944
},
"steps_from_proto": {
"total": 0.00527483999985634,
"count": 2,
"is_parallel": true,
"self": 0.0007120100001429819,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.004562829999713358,
"count": 8,
"is_parallel": true,
"self": 0.004562829999713358
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 19485.827531176965,
"count": 97774,
"is_parallel": true,
"self": 51.70431021806871,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 278.590667788181,
"count": 97774,
"is_parallel": true,
"self": 278.590667788181
},
"communicator.exchange": {
"total": 18436.39052922608,
"count": 97774,
"is_parallel": true,
"self": 18436.39052922608
},
"steps_from_proto": {
"total": 719.1420239446365,
"count": 195548,
"is_parallel": true,
"self": 95.50878036110021,
"children": {
"_process_rank_one_or_two_observation": {
"total": 623.6332435835363,
"count": 782192,
"is_parallel": true,
"self": 623.6332435835363
}
}
}
}
},
"steps_from_proto": {
"total": 0.029446555003687536,
"count": 14,
"is_parallel": true,
"self": 0.00425714500624963,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.025189409997437906,
"count": 56,
"is_parallel": true,
"self": 0.025189409997437906
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 677.3294446776358,
"count": 97775,
"self": 23.256797303710755,
"children": {
"process_trajectory": {
"total": 175.80368680793185,
"count": 97775,
"self": 165.98220378292513,
"children": {
"RLTrainer._checkpoint": {
"total": 9.821483025006728,
"count": 30,
"self": 9.821483025006728
}
}
},
"_update_policy": {
"total": 478.2689605659932,
"count": 69,
"self": 308.31291992297315,
"children": {
"TorchPOCAOptimizer.update": {
"total": 169.95604064302006,
"count": 2070,
"self": 169.95604064302006
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.3899989426136017e-06,
"count": 1,
"self": 1.3899989426136017e-06
},
"TrainerController._save_models": {
"total": 0.433507888999884,
"count": 1,
"self": 0.0025145049985439982,
"children": {
"RLTrainer._checkpoint": {
"total": 0.43099338400134,
"count": 1,
"self": 0.43099338400134
}
}
}
}
}
}
}