poca-SoccerTwos/run_logs/timers.json
{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.6892871856689453,
"min": 1.6637274026870728,
"max": 3.2958061695098877,
"count": 2504
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 18055.1015625,
"min": 7680.09765625,
"max": 105465.796875,
"count": 2504
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 73.37142857142857,
"min": 41.73684210526316,
"max": 999.0,
"count": 2504
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 10272.0,
"min": 6556.0,
"max": 24920.0,
"count": 2504
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1563.2672605397256,
"min": 1197.6947597061876,
"max": 1584.7162481858609,
"count": 2485
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 109428.7082377808,
"min": 2396.068531600837,
"max": 180657.65229318815,
"count": 2485
},
"SoccerTwos.Step.mean": {
"value": 12524938.0,
"min": 4476.0,
"max": 12524938.0,
"count": 2505
},
"SoccerTwos.Step.sum": {
"value": 12524938.0,
"min": 4476.0,
"max": 12524938.0,
"count": 2505
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": -0.012251640670001507,
"min": -0.1846155971288681,
"max": 0.152823343873024,
"count": 2505
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": -0.8576148748397827,
"min": -13.990399360656738,
"max": 12.732059478759766,
"count": 2505
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": -0.008902576752007008,
"min": -0.18532951176166534,
"max": 0.1499340832233429,
"count": 2505
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": -0.6231803894042969,
"min": -14.163141250610352,
"max": 12.865997314453125,
"count": 2505
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 2505
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 2505
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": -0.06346285683768137,
"min": -0.8181818181818182,
"max": 0.6240166574716568,
"count": 2505
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": -4.442399978637695,
"min": -42.39199995994568,
"max": 37.171600103378296,
"count": 2505
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": -0.06346285683768137,
"min": -0.8181818181818182,
"max": 0.6240166574716568,
"count": 2505
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": -4.442399978637695,
"min": -42.39199995994568,
"max": 37.171600103378296,
"count": 2505
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 2505
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 2505
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.027965155991397397,
"min": 0.022734053095931208,
"max": 0.03164981808044508,
"count": 249
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.027965155991397397,
"min": 0.022734053095931208,
"max": 0.03164981808044508,
"count": 249
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.052076314860540956,
"min": 0.0007150877383636583,
"max": 0.061866030692019396,
"count": 249
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.052076314860540956,
"min": 0.0007150877383636583,
"max": 0.061866030692019396,
"count": 249
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.054013946714500584,
"min": 0.0007275409075779188,
"max": 0.06516992378359039,
"count": 249
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.054013946714500584,
"min": 0.0007275409075779188,
"max": 0.06516992378359039,
"count": 249
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.000225084528971832,
"min": 0.000225084528971832,
"max": 0.000299688888103704,
"count": 249
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.000225084528971832,
"min": 0.000225084528971832,
"max": 0.000299688888103704,
"count": 249
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.25005633600000005,
"min": 0.25005633600000005,
"max": 0.29979259199999997,
"count": 249
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.25005633600000005,
"min": 0.25005633600000005,
"max": 0.29979259199999997,
"count": 249
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.007505313983200001,
"min": 0.007505313983200001,
"max": 0.009989639970400001,
"count": 249
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.007505313983200001,
"min": 0.007505313983200001,
"max": 0.009989639970400001,
"count": 249
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1711360012",
"python_version": "3.10.12 | packaged by Anaconda, Inc. | (main, Jul 5 2023, 19:01:18) [MSC v.1916 64 bit (AMD64)]",
"command_line_arguments": "\\\\?\\C:\\Users\\gael1\\anaconda3\\envs\\rl\\Scripts\\mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos.exe --run-id=SoccerTwos3 --no-graphics",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.2.1+cpu",
"numpy_version": "1.23.5",
"end_time_seconds": "1711383372"
},
"total": 23360.173033299972,
"count": 1,
"self": 0.22835280001163483,
"children": {
"run_training.setup": {
"total": 0.08911920001264662,
"count": 1,
"self": 0.08911920001264662
},
"TrainerController.start_learning": {
"total": 23359.855561299948,
"count": 1,
"self": 19.48167141363956,
"children": {
"TrainerController._reset_env": {
"total": 7.970532500185072,
"count": 126,
"self": 7.970532500185072
},
"TrainerController.advance": {
"total": 23332.289733886137,
"count": 860657,
"self": 17.791757446713746,
"children": {
"env_step": {
"total": 12407.574141020654,
"count": 860657,
"self": 9478.773316386738,
"children": {
"SubprocessEnvManager._take_step": {
"total": 2916.604442927055,
"count": 860657,
"self": 107.05926301924046,
"children": {
"TorchPolicy.evaluate": {
"total": 2809.5451799078146,
"count": 1583938,
"self": 2809.5451799078146
}
}
},
"workers": {
"total": 12.196381706860848,
"count": 860656,
"self": 0.0,
"children": {
"worker_root": {
"total": 23327.420373377157,
"count": 860656,
"is_parallel": true,
"self": 16007.075391619117,
"children": {
"steps_from_proto": {
"total": 0.18477399949915707,
"count": 252,
"is_parallel": true,
"self": 0.037741698790341616,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.14703230070881546,
"count": 1008,
"is_parallel": true,
"self": 0.14703230070881546
}
}
},
"UnityEnvironment.step": {
"total": 7320.160207758541,
"count": 860656,
"is_parallel": true,
"self": 373.96662447741255,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 313.44459212664515,
"count": 860656,
"is_parallel": true,
"self": 313.44459212664515
},
"communicator.exchange": {
"total": 5443.519387620385,
"count": 860656,
"is_parallel": true,
"self": 5443.519387620385
},
"steps_from_proto": {
"total": 1189.2296035340987,
"count": 1721312,
"is_parallel": true,
"self": 241.04875135957263,
"children": {
"_process_rank_one_or_two_observation": {
"total": 948.1808521745261,
"count": 6885248,
"is_parallel": true,
"self": 948.1808521745261
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 10906.92383541877,
"count": 860656,
"self": 167.86869756120723,
"children": {
"process_trajectory": {
"total": 2661.9871018583653,
"count": 860656,
"self": 2659.399978758651,
"children": {
"RLTrainer._checkpoint": {
"total": 2.5871230997145176,
"count": 25,
"self": 2.5871230997145176
}
}
},
"_update_policy": {
"total": 8077.068035999197,
"count": 249,
"self": 1635.0027775001945,
"children": {
"TorchPOCAOptimizer.update": {
"total": 6442.065258499002,
"count": 36048,
"self": 6442.065258499002
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.00000761449337e-06,
"count": 1,
"self": 1.00000761449337e-06
},
"TrainerController._save_models": {
"total": 0.11362249997910112,
"count": 1,
"self": 0.008432799950242043,
"children": {
"RLTrainer._checkpoint": {
"total": 0.10518970002885908,
"count": 1,
"self": 0.10518970002885908
}
}
}
}
}
}
}