poca-SoccerTwos/run_logs/timers.json
{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.5888909101486206,
"min": 1.571339726448059,
"max": 3.2957446575164795,
"count": 1000
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 31879.5078125,
"min": 28644.826171875,
"max": 147545.90625,
"count": 1000
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 49.22222222222222,
"min": 40.51260504201681,
"max": 999.0,
"count": 1000
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19492.0,
"min": 5084.0,
"max": 30192.0,
"count": 1000
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1679.0176978760417,
"min": 1194.7070766179352,
"max": 1704.459136388523,
"count": 986
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 332445.5041794563,
"min": 2390.9871681159348,
"max": 398753.2600412559,
"count": 986
},
"SoccerTwos.Step.mean": {
"value": 9999946.0,
"min": 9512.0,
"max": 9999946.0,
"count": 1000
},
"SoccerTwos.Step.sum": {
"value": 9999946.0,
"min": 9512.0,
"max": 9999946.0,
"count": 1000
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": -0.06979527324438095,
"min": -0.13052012026309967,
"max": 0.1664070039987564,
"count": 1000
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": -13.819464683532715,
"min": -22.579980850219727,
"max": 25.442827224731445,
"count": 1000
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": -0.07360540330410004,
"min": -0.12285048514604568,
"max": 0.16845263540744781,
"count": 1000
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": -14.573869705200195,
"min": -21.351919174194336,
"max": 26.163158416748047,
"count": 1000
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 1000
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 1000
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": -0.09636161483899512,
"min": -0.5333333333333333,
"max": 0.40000701996318083,
"count": 1000
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": -19.079599738121033,
"min": -58.31099957227707,
"max": 56.07600039243698,
"count": 1000
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": -0.09636161483899512,
"min": -0.5333333333333333,
"max": 0.40000701996318083,
"count": 1000
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": -19.079599738121033,
"min": -58.31099957227707,
"max": 56.07600039243698,
"count": 1000
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 1000
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 1000
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.015936278041044714,
"min": 0.010288070857253235,
"max": 0.02491610542492708,
"count": 482
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.015936278041044714,
"min": 0.010288070857253235,
"max": 0.02491610542492708,
"count": 482
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.11955500766634941,
"min": 6.951558937847343e-06,
"max": 0.12987020537257193,
"count": 482
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.11955500766634941,
"min": 6.951558937847343e-06,
"max": 0.12987020537257193,
"count": 482
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.12300647149483362,
"min": 1.128340013565321e-05,
"max": 0.1324800950785478,
"count": 482
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.12300647149483362,
"min": 1.128340013565321e-05,
"max": 0.1324800950785478,
"count": 482
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 482
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 482
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.20000000000000004,
"max": 0.20000000000000007,
"count": 482
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.20000000000000004,
"max": 0.20000000000000007,
"count": 482
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 482
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 482
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1677295731",
"python_version": "3.10.6 (main, Nov 14 2022, 16:10:14) [GCC 11.3.0]",
"command_line_arguments": "/home/zipbomb/.local/share/virtualenvs/HF-RL-faxHmpJw/bin/mlagents-learn ./config/poca/SoccerTwos.yaml --env ../SoccerTwos/SoccerTwos.x86_64 --run-id=SoccerTwos --no-graphics --force",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.11.0+cu113",
"numpy_version": "1.21.2",
"end_time_seconds": "1677320404"
},
"total": 24673.064149468002,
"count": 1,
"self": 0.26983999599906383,
"children": {
"run_training.setup": {
"total": 0.009792885000024398,
"count": 1,
"self": 0.009792885000024398
},
"TrainerController.start_learning": {
"total": 24672.784516587002,
"count": 1,
"self": 11.13979328739515,
"children": {
"TrainerController._reset_env": {
"total": 5.506416542994089,
"count": 50,
"self": 5.506416542994089
},
"TrainerController.advance": {
"total": 24655.923696224614,
"count": 685977,
"self": 9.669845434505987,
"children": {
"env_step": {
"total": 19836.44688725222,
"count": 685977,
"self": 12664.629140945897,
"children": {
"SubprocessEnvManager._take_step": {
"total": 7164.484170848609,
"count": 685977,
"self": 60.2685017676431,
"children": {
"TorchPolicy.evaluate": {
"total": 7104.215669080966,
"count": 1258688,
"self": 7104.215669080966
}
}
},
"workers": {
"total": 7.333575457713664,
"count": 685977,
"self": 0.0,
"children": {
"worker_root": {
"total": 24643.533769548787,
"count": 685977,
"is_parallel": true,
"self": 13214.31041869205,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0021718269999837503,
"count": 2,
"is_parallel": true,
"self": 0.0006294349999507176,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0015423920000330327,
"count": 8,
"is_parallel": true,
"self": 0.0015423920000330327
}
}
},
"UnityEnvironment.step": {
"total": 0.036916527000016686,
"count": 1,
"is_parallel": true,
"self": 0.000653515999715637,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0006365070000811102,
"count": 1,
"is_parallel": true,
"self": 0.0006365070000811102
},
"communicator.exchange": {
"total": 0.03375780500005021,
"count": 1,
"is_parallel": true,
"self": 0.03375780500005021
},
"steps_from_proto": {
"total": 0.0018686990001697268,
"count": 2,
"is_parallel": true,
"self": 0.0004022280004392087,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.001466470999730518,
"count": 8,
"is_parallel": true,
"self": 0.001466470999730518
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 11429.15689735272,
"count": 685976,
"is_parallel": true,
"self": 345.7586457823545,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 211.27631940359606,
"count": 685976,
"is_parallel": true,
"self": 211.27631940359606
},
"communicator.exchange": {
"total": 9902.177730124135,
"count": 685976,
"is_parallel": true,
"self": 9902.177730124135
},
"steps_from_proto": {
"total": 969.9442020426347,
"count": 1371952,
"is_parallel": true,
"self": 168.79481287256135,
"children": {
"_process_rank_one_or_two_observation": {
"total": 801.1493891700734,
"count": 5487808,
"is_parallel": true,
"self": 801.1493891700734
}
}
}
}
},
"steps_from_proto": {
"total": 0.0664535040177725,
"count": 98,
"is_parallel": true,
"self": 0.011357043998032168,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.055096460019740334,
"count": 392,
"is_parallel": true,
"self": 0.055096460019740334
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 4809.806963537888,
"count": 685977,
"self": 85.04995679648437,
"children": {
"process_trajectory": {
"total": 2660.789986080395,
"count": 685977,
"self": 2656.5308116764036,
"children": {
"RLTrainer._checkpoint": {
"total": 4.259174403991437,
"count": 20,
"self": 4.259174403991437
}
}
},
"_update_policy": {
"total": 2063.967020661008,
"count": 482,
"self": 1139.4434591519298,
"children": {
"TorchPOCAOptimizer.update": {
"total": 924.5235615090783,
"count": 14469,
"self": 924.5235615090783
}
}
}
}
}
}
},
"trainer_threads": {
"total": 7.300004654098302e-07,
"count": 1,
"self": 7.300004654098302e-07
},
"TrainerController._save_models": {
"total": 0.21460980199844926,
"count": 1,
"self": 0.00179122999907122,
"children": {
"RLTrainer._checkpoint": {
"total": 0.21281857199937804,
"count": 1,
"self": 0.21281857199937804
}
}
}
}
}
}
}
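
The gauges and timing data above can be inspected programmatically. Below is a minimal sketch, assuming the file is saved locally as run_logs/timers.json (the path is an assumption based on the repo layout); it loads the JSON, prints each gauge's summary statistics, and reports the total wall-clock time of the run.

    import json

    # Path is an assumption: point this at wherever run_logs/timers.json lives locally.
    with open("run_logs/timers.json") as f:
        timers = json.load(f)

    # Each gauge recorded by ML-Agents has value/min/max/count fields.
    for name, stats in timers["gauges"].items():
        print(f"{name}: value={stats['value']:.4g} "
              f"(min={stats['min']:.4g}, max={stats['max']:.4g}, n={stats['count']})")

    # Total wall-clock time of the training run, in seconds.
    print("total seconds:", timers["total"])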