poca-SoccerTwos/run_logs/timers.json
{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.8695679903030396,
"min": 1.8561608791351318,
"max": 3.2957324981689453,
"count": 741
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 37511.01171875,
"min": 14374.3935546875,
"max": 131923.46875,
"count": 741
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 74.15384615384616,
"min": 41.36974789915966,
"max": 999.0,
"count": 741
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19280.0,
"min": 14028.0,
"max": 27756.0,
"count": 741
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1601.087643019408,
"min": 1197.055581097732,
"max": 1619.8735147975078,
"count": 709
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 208141.39359252303,
"min": 2396.657233428197,
"max": 377081.69390390615,
"count": 709
},
"SoccerTwos.Step.mean": {
"value": 7409936.0,
"min": 9894.0,
"max": 7409936.0,
"count": 741
},
"SoccerTwos.Step.sum": {
"value": 7409936.0,
"min": 9894.0,
"max": 7409936.0,
"count": 741
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": -0.014496799558401108,
"min": -0.1171058714389801,
"max": 0.20744945108890533,
"count": 741
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": -1.899080753326416,
"min": -19.87245750427246,
"max": 27.615901947021484,
"count": 741
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": -0.010194798931479454,
"min": -0.12086234986782074,
"max": 0.20504526793956757,
"count": 741
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": -1.335518717765808,
"min": -20.733966827392578,
"max": 26.297012329101562,
"count": 741
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 741
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 741
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": -0.04834198314724988,
"min": -0.6237600008646648,
"max": 0.48357390903908276,
"count": 741
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": -6.332799792289734,
"min": -54.086000204086304,
"max": 61.03159999847412,
"count": 741
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": -0.04834198314724988,
"min": -0.6237600008646648,
"max": 0.48357390903908276,
"count": 741
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": -6.332799792289734,
"min": -54.086000204086304,
"max": 61.03159999847412,
"count": 741
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 741
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 741
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.016808574992076806,
"min": 0.009887452382827178,
"max": 0.023939259319255748,
"count": 355
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.016808574992076806,
"min": 0.009887452382827178,
"max": 0.023939259319255748,
"count": 355
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.10378332709272703,
"min": 5.960457716961779e-06,
"max": 0.1252267780403296,
"count": 355
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.10378332709272703,
"min": 5.960457716961779e-06,
"max": 0.1252267780403296,
"count": 355
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.10573393156131108,
"min": 6.812734454797464e-06,
"max": 0.1279440276324749,
"count": 355
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.10573393156131108,
"min": 6.812734454797464e-06,
"max": 0.1279440276324749,
"count": 355
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 355
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 355
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 355
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 355
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 355
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 355
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1718897448",
"python_version": "3.10.11 (tags/v3.10.11:7d4cc5a, Apr 5 2023, 00:38:17) [MSC v.1929 64 bit (AMD64)]",
"command_line_arguments": "C:\\POCs\\RAG\\deep-rl\\ml-agents\\venv\\Scripts\\mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos.exe --run-id=SoccerTwos --no-graphics",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.3.1+cpu",
"numpy_version": "1.23.5",
"end_time_seconds": "1718948482"
},
"total": 51033.824782700016,
"count": 1,
"self": 0.03263080000760965,
"children": {
"run_training.setup": {
"total": 1.4725668000173755,
"count": 1,
"self": 1.4725668000173755
},
"TrainerController.start_learning": {
"total": 51032.31958509999,
"count": 1,
"self": 25.484635114320554,
"children": {
"TrainerController._reset_env": {
"total": 51.38374520002981,
"count": 38,
"self": 51.38374520002981
},
"TrainerController.advance": {
"total": 50954.992948485655,
"count": 502179,
"self": 26.751977856765734,
"children": {
"env_step": {
"total": 22406.75197801474,
"count": 502179,
"self": 16473.023351420707,
"children": {
"SubprocessEnvManager._take_step": {
"total": 5918.248750693747,
"count": 502179,
"self": 169.12540848905337,
"children": {
"TorchPolicy.evaluate": {
"total": 5749.1233422046935,
"count": 938788,
"self": 5749.1233422046935
}
}
},
"workers": {
"total": 15.47987590028788,
"count": 502179,
"self": 0.0,
"children": {
"worker_root": {
"total": 50943.18143299091,
"count": 502179,
"is_parallel": true,
"self": 37682.81295509299,
"children": {
"steps_from_proto": {
"total": 0.19373509986326098,
"count": 76,
"is_parallel": true,
"self": 0.033987099886871874,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.1597479999763891,
"count": 304,
"is_parallel": true,
"self": 0.1597479999763891
}
}
},
"UnityEnvironment.step": {
"total": 13260.174742798059,
"count": 502179,
"is_parallel": true,
"self": 764.8212337167934,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 608.9368677975144,
"count": 502179,
"is_parallel": true,
"self": 608.9368677975144
},
"communicator.exchange": {
"total": 9446.068704087433,
"count": 502179,
"is_parallel": true,
"self": 9446.068704087433
},
"steps_from_proto": {
"total": 2440.3479371963185,
"count": 1004358,
"is_parallel": true,
"self": 433.59629521882744,
"children": {
"_process_rank_one_or_two_observation": {
"total": 2006.751641977491,
"count": 4017432,
"is_parallel": true,
"self": 2006.751641977491
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 28521.488992614148,
"count": 502179,
"self": 172.43458791816374,
"children": {
"process_trajectory": {
"total": 5132.9358505963755,
"count": 502179,
"self": 5127.567651396355,
"children": {
"RLTrainer._checkpoint": {
"total": 5.368199200020172,
"count": 14,
"self": 5.368199200020172
}
}
},
"_update_policy": {
"total": 23216.11855409961,
"count": 356,
"self": 2201.4250492994033,
"children": {
"TorchPOCAOptimizer.update": {
"total": 21014.693504800205,
"count": 10660,
"self": 21014.693504800205
}
}
}
}
}
}
},
"trainer_threads": {
"total": 2.6999914553016424e-06,
"count": 1,
"self": 2.6999914553016424e-06
},
"TrainerController._save_models": {
"total": 0.4582535999943502,
"count": 1,
"self": 0.021307299990439788,
"children": {
"RLTrainer._checkpoint": {
"total": 0.43694630000391044,
"count": 1,
"self": 0.43694630000391044
}
}
}
}
}
}
}
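A minimal sketch (not part of the original run logs) showing how the file above can be consumed: it assumes the JSON sits at run_logs/timers.json relative to the working directory, and it reads two of the gauge entries plus the first level of the timer tree using the key names that appear in this file ("gauges", "total", "children").

import json

# Load the ML-Agents timer/gauge dump shown above.
with open("run_logs/timers.json") as f:
    timers = json.load(f)

# Each gauge stores value/min/max/count for one training statistic.
for name in ("SoccerTwos.Self-play.ELO.mean", "SoccerTwos.Step.mean"):
    gauge = timers["gauges"][name]
    print(f"{name}: last={gauge['value']:.1f} "
          f"(min={gauge['min']:.1f}, max={gauge['max']:.1f}, n={gauge['count']})")

# The timer tree nests under "children"; report each top-level block
# as a share of the total wall-clock time recorded for the run.
total = timers["total"]
for child, node in timers["children"].items():
    share = 100.0 * node["total"] / total
    print(f"{child}: {node['total']:.1f}s ({share:.1f}% of {total:.1f}s)")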