{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 2.292132616043091,
"min": 2.2572052478790283,
"max": 3.2958037853240967,
"count": 1126
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 46869.52734375,
"min": 8051.8291015625,
"max": 161575.640625,
"count": 1126
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 52.784946236559136,
"min": 39.47154471544715,
"max": 999.0,
"count": 1126
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19636.0,
"min": 12904.0,
"max": 27260.0,
"count": 1126
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1540.1519592437812,
"min": 1181.6775315885016,
"max": 1549.7725386332547,
"count": 1008
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 286468.2644193433,
"min": 2363.355063177003,
"max": 350163.47599503293,
"count": 1008
},
"SoccerTwos.Step.mean": {
"value": 11259942.0,
"min": 9648.0,
"max": 11259942.0,
"count": 1126
},
"SoccerTwos.Step.sum": {
"value": 11259942.0,
"min": 9648.0,
"max": 11259942.0,
"count": 1126
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": -0.02175154536962509,
"min": -0.08181066811084747,
"max": 0.16583144664764404,
"count": 1126
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": -4.045787334442139,
"min": -14.971351623535156,
"max": 21.337879180908203,
"count": 1126
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": -0.021867506206035614,
"min": -0.0818493589758873,
"max": 0.16594870388507843,
"count": 1126
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": -4.067356109619141,
"min": -15.960625648498535,
"max": 21.526124954223633,
"count": 1126
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 1126
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 1126
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": -0.03914623683498752,
"min": -0.6153846153846154,
"max": 0.41731489085136575,
"count": 1126
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": -7.281200051307678,
"min": -52.95340007543564,
"max": 59.68079996109009,
"count": 1126
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": -0.03914623683498752,
"min": -0.6153846153846154,
"max": 0.41731489085136575,
"count": 1126
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": -7.281200051307678,
"min": -52.95340007543564,
"max": 59.68079996109009,
"count": 1126
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 1126
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 1126
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.015966420065403022,
"min": 0.009779906947126922,
"max": 0.025029756675940006,
"count": 533
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.015966420065403022,
"min": 0.009779906947126922,
"max": 0.025029756675940006,
"count": 533
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.07688813060522079,
"min": 3.8149103147588904e-07,
"max": 0.08983085478345552,
"count": 533
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.07688813060522079,
"min": 3.8149103147588904e-07,
"max": 0.08983085478345552,
"count": 533
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.07765562906861305,
"min": 5.127459023886634e-07,
"max": 0.09085923979679743,
"count": 533
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.07765562906861305,
"min": 5.127459023886634e-07,
"max": 0.09085923979679743,
"count": 533
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 533
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 533
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.17999999999999997,
"min": 0.17999999999999997,
"max": 0.17999999999999997,
"count": 533
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.17999999999999997,
"min": 0.17999999999999997,
"max": 0.17999999999999997,
"count": 533
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.007000000000000002,
"min": 0.007000000000000002,
"max": 0.007000000000000002,
"count": 533
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.007000000000000002,
"min": 0.007000000000000002,
"max": 0.007000000000000002,
"count": 533
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1675421136",
"python_version": "3.8.16 (default, Jan 17 2023, 23:13:24) \n[GCC 11.2.0]",
"command_line_arguments": "/home/rere/miniconda3/envs/rl/bin/mlagents-learn ./ml-agents/config/poca/SoccerTwos.yaml --env=./ml-agents/training-envs-executables/SoccerTwos/SoccerTwos.x86_64 --run-id=SoccerTwos_01 --no-graphics --torch-device cuda:0",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.8.1+cu102",
"numpy_version": "1.21.2",
"end_time_seconds": "1675439443"
},
"total": 18306.663035994003,
"count": 1,
"self": 0.9977862580017245,
"children": {
"run_training.setup": {
"total": 0.007945340999867767,
"count": 1,
"self": 0.007945340999867767
},
"TrainerController.start_learning": {
"total": 18305.657304395,
"count": 1,
"self": 15.374812116697285,
"children": {
"TrainerController._reset_env": {
"total": 9.146132168985787,
"count": 57,
"self": 9.146132168985787
},
"TrainerController.advance": {
"total": 18281.01684047932,
"count": 751722,
"self": 16.552055113919778,
"children": {
"env_step": {
"total": 14103.620230038265,
"count": 751722,
"self": 11001.98278670678,
"children": {
"SubprocessEnvManager._take_step": {
"total": 3092.02788804421,
"count": 751722,
"self": 94.30382046052819,
"children": {
"TorchPolicy.evaluate": {
"total": 2997.724067583682,
"count": 1439398,
"self": 2997.724067583682
}
}
},
"workers": {
"total": 9.609555287273906,
"count": 751721,
"self": 0.0,
"children": {
"worker_root": {
"total": 18282.71187319878,
"count": 751721,
"is_parallel": true,
"self": 9090.156762385635,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.002979571000651049,
"count": 2,
"is_parallel": true,
"self": 0.0013245250020190724,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0016550459986319765,
"count": 8,
"is_parallel": true,
"self": 0.0016550459986319765
}
}
},
"UnityEnvironment.step": {
"total": 0.023795369999788818,
"count": 1,
"is_parallel": true,
"self": 0.0004765539988511591,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.00040580000040790765,
"count": 1,
"is_parallel": true,
"self": 0.00040580000040790765
},
"communicator.exchange": {
"total": 0.021398771999884048,
"count": 1,
"is_parallel": true,
"self": 0.021398771999884048
},
"steps_from_proto": {
"total": 0.0015142440006457036,
"count": 2,
"is_parallel": true,
"self": 0.0003207240006304346,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.001193520000015269,
"count": 8,
"is_parallel": true,
"self": 0.001193520000015269
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 9192.44553035613,
"count": 751720,
"is_parallel": true,
"self": 572.807218609305,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 322.06720877933185,
"count": 751720,
"is_parallel": true,
"self": 322.06720877933185
},
"communicator.exchange": {
"total": 6728.8590341110075,
"count": 751720,
"is_parallel": true,
"self": 6728.8590341110075
},
"steps_from_proto": {
"total": 1568.7120688564864,
"count": 1503440,
"is_parallel": true,
"self": 261.1362915857608,
"children": {
"_process_rank_one_or_two_observation": {
"total": 1307.5757772707257,
"count": 6013760,
"is_parallel": true,
"self": 1307.5757772707257
}
}
}
}
},
"steps_from_proto": {
"total": 0.10958045701499941,
"count": 112,
"is_parallel": true,
"self": 0.01811771900520398,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.09146273800979543,
"count": 448,
"is_parallel": true,
"self": 0.09146273800979543
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 4160.844555327133,
"count": 751721,
"self": 101.44228592397576,
"children": {
"process_trajectory": {
"total": 1276.6872187961626,
"count": 751721,
"self": 1273.6160992441573,
"children": {
"RLTrainer._checkpoint": {
"total": 3.071119552005257,
"count": 22,
"self": 3.071119552005257
}
}
},
"_update_policy": {
"total": 2782.7150506069947,
"count": 534,
"self": 1481.6673787659538,
"children": {
"TorchPOCAOptimizer.update": {
"total": 1301.047671841041,
"count": 16020,
"self": 1301.047671841041
}
}
}
}
}
}
},
"trainer_threads": {
"total": 7.779999577905983e-07,
"count": 1,
"self": 7.779999577905983e-07
},
"TrainerController._save_models": {
"total": 0.11951885199960088,
"count": 1,
"self": 0.0011729610014299396,
"children": {
"RLTrainer._checkpoint": {
"total": 0.11834589099817094,
"count": 1,
"self": 0.11834589099817094
}
}
}
}
}
}
}