poca-SoccerTwos/run_logs/timers.json
{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.7822149991989136,
"min": 1.7453781366348267,
"max": 3.295663833618164,
"count": 661
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 38609.90625,
"min": 10467.08984375,
"max": 125354.65625,
"count": 661
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 44.3177570093458,
"min": 39.5,
"max": 999.0,
"count": 661
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 18968.0,
"min": 16328.0,
"max": 24088.0,
"count": 661
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1538.7605143129633,
"min": 1178.5989668315106,
"max": 1562.7509806582796,
"count": 653
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 329294.75006297417,
"min": 2361.174532664744,
"max": 382318.7118468894,
"count": 653
},
"SoccerTwos.Step.mean": {
"value": 6609926.0,
"min": 9068.0,
"max": 6609926.0,
"count": 661
},
"SoccerTwos.Step.sum": {
"value": 6609926.0,
"min": 9068.0,
"max": 6609926.0,
"count": 661
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": 0.014761903323233128,
"min": -0.12782159447669983,
"max": 0.1758817881345749,
"count": 661
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": 3.173809289932251,
"min": -27.737285614013672,
"max": 25.6873779296875,
"count": 661
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.012893281877040863,
"min": -0.12705710530281067,
"max": 0.1728358417749405,
"count": 661
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": 2.7720556259155273,
"min": -27.571392059326172,
"max": 26.044025421142578,
"count": 661
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 661
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 661
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": 0.024597209276154983,
"min": -0.6153846153846154,
"max": 0.40380740717605307,
"count": 661
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": 5.2883999943733215,
"min": -55.9611998796463,
"max": 60.58199977874756,
"count": 661
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": 0.024597209276154983,
"min": -0.6153846153846154,
"max": 0.40380740717605307,
"count": 661
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": 5.2883999943733215,
"min": -55.9611998796463,
"max": 60.58199977874756,
"count": 661
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 661
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 661
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.01707396974573688,
"min": 0.010950594616588205,
"max": 0.02413030941679608,
"count": 318
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.01707396974573688,
"min": 0.010950594616588205,
"max": 0.02413030941679608,
"count": 318
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.10570046529173852,
"min": 0.0009495018108282238,
"max": 0.13799785524606706,
"count": 318
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.10570046529173852,
"min": 0.0009495018108282238,
"max": 0.13799785524606706,
"count": 318
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.10721415902177493,
"min": 0.000949964762548916,
"max": 0.14026200572649639,
"count": 318
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.10721415902177493,
"min": 0.000949964762548916,
"max": 0.14026200572649639,
"count": 318
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 318
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 318
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 318
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 318
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 318
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 318
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1708950544",
"python_version": "3.10.12 | packaged by Anaconda, Inc. | (main, Jul 5 2023, 19:01:18) [MSC v.1916 64 bit (AMD64)]",
"command_line_arguments": "\\\\?\\C:\\miniconda3\\envs\\ai-vs-ai\\Scripts\\mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos/SoccerTwos.exe --run-id=SoccerTwos1 --no-graphics",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.2.1+cpu",
"numpy_version": "1.23.5",
"end_time_seconds": "1708969079"
},
"total": 18534.7064361,
"count": 1,
"self": 0.14278510000440292,
"children": {
"run_training.setup": {
"total": 0.10079140000016196,
"count": 1,
"self": 0.10079140000016196
},
"TrainerController.start_learning": {
"total": 18534.462859599997,
"count": 1,
"self": 12.685384199059627,
"children": {
"TrainerController._reset_env": {
"total": 6.546452300004603,
"count": 34,
"self": 6.546452300004603
},
"TrainerController.advance": {
"total": 18515.087789400946,
"count": 455140,
"self": 12.294202002231032,
"children": {
"env_step": {
"total": 8933.962382802936,
"count": 455140,
"self": 6972.291975398686,
"children": {
"SubprocessEnvManager._take_step": {
"total": 1953.9903446055905,
"count": 455140,
"self": 66.39807941307663,
"children": {
"TorchPolicy.evaluate": {
"total": 1887.5922651925139,
"count": 833048,
"self": 1887.5922651925139
}
}
},
"workers": {
"total": 7.680062798659492,
"count": 455140,
"self": 0.0,
"children": {
"worker_root": {
"total": 18512.59882730352,
"count": 455140,
"is_parallel": true,
"self": 13013.635489304957,
"children": {
"steps_from_proto": {
"total": 0.05948889988940209,
"count": 68,
"is_parallel": true,
"self": 0.01309129988658242,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.04639760000281967,
"count": 272,
"is_parallel": true,
"self": 0.04639760000281967
}
}
},
"UnityEnvironment.step": {
"total": 5498.903849098671,
"count": 455140,
"is_parallel": true,
"self": 270.8469743872265,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 264.11067800105957,
"count": 455140,
"is_parallel": true,
"self": 264.11067800105957
},
"communicator.exchange": {
"total": 4060.5262949026583,
"count": 455140,
"is_parallel": true,
"self": 4060.5262949026583
},
"steps_from_proto": {
"total": 903.419901807727,
"count": 910280,
"is_parallel": true,
"self": 190.2153265082161,
"children": {
"_process_rank_one_or_two_observation": {
"total": 713.2045752995109,
"count": 3641120,
"is_parallel": true,
"self": 713.2045752995109
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 9568.83120459578,
"count": 455140,
"self": 82.52617349205684,
"children": {
"process_trajectory": {
"total": 2069.552692403493,
"count": 455140,
"self": 2067.586379303524,
"children": {
"RLTrainer._checkpoint": {
"total": 1.9663130999688292,
"count": 13,
"self": 1.9663130999688292
}
}
},
"_update_policy": {
"total": 7416.752338700229,
"count": 319,
"self": 953.1598972998399,
"children": {
"TorchPOCAOptimizer.update": {
"total": 6463.592441400389,
"count": 9541,
"self": 6463.592441400389
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.6999983927235007e-06,
"count": 1,
"self": 1.6999983927235007e-06
},
"TrainerController._save_models": {
"total": 0.14323199998761993,
"count": 1,
"self": 0.002328799993847497,
"children": {
"RLTrainer._checkpoint": {
"total": 0.14090319999377243,
"count": 1,
"self": 0.14090319999377243
}
}
}
}
}
}
}