poca-SoccerTwos/run_logs/timers.json
{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.9693621397018433,
"min": 1.9693621397018433,
"max": 3.2958312034606934,
"count": 500
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 38567.98828125,
"min": 23940.580078125,
"max": 130549.8671875,
"count": 500
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 91.19298245614036,
"min": 63.896103896103895,
"max": 999.0,
"count": 500
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 20792.0,
"min": 15936.0,
"max": 25408.0,
"count": 500
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1436.3633610517857,
"min": 1200.7447054581453,
"max": 1456.3359400568088,
"count": 495
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 163745.42315990356,
"min": 2414.8185677376273,
"max": 222401.58267636702,
"count": 495
},
"SoccerTwos.Step.mean": {
"value": 4999916.0,
"min": 9722.0,
"max": 4999916.0,
"count": 500
},
"SoccerTwos.Step.sum": {
"value": 4999916.0,
"min": 9722.0,
"max": 4999916.0,
"count": 500
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": 0.03328828513622284,
"min": -0.14581826329231262,
"max": 0.17683535814285278,
"count": 500
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": 3.7615761756896973,
"min": -17.18515396118164,
"max": 18.582040786743164,
"count": 500
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.021687177941203117,
"min": -0.14562536776065826,
"max": 0.19014842808246613,
"count": 500
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": 2.450651168823242,
"min": -17.672834396362305,
"max": 19.479516983032227,
"count": 500
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 500
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 500
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": 0.0660530988094026,
"min": -0.5005599995454152,
"max": 0.46273077164704984,
"count": 500
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": 7.464000165462494,
"min": -50.86040019989014,
"max": 42.979399621486664,
"count": 500
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": 0.0660530988094026,
"min": -0.5005599995454152,
"max": 0.46273077164704984,
"count": 500
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": 7.464000165462494,
"min": -50.86040019989014,
"max": 42.979399621486664,
"count": 500
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 500
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 500
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.049719220967963336,
"min": 0.01274991297905217,
"max": 0.05383361073210836,
"count": 240
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.049719220967963336,
"min": 0.01274991297905217,
"max": 0.05383361073210836,
"count": 240
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.04460492178797722,
"min": 0.0005625370285997633,
"max": 0.07234545573592185,
"count": 240
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.04460492178797722,
"min": 0.0005625370285997633,
"max": 0.07234545573592185,
"count": 240
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.05085183165967464,
"min": 0.0006860743058496154,
"max": 0.07716061219573021,
"count": 240
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.05085183165967464,
"min": 0.0006860743058496154,
"max": 0.07716061219573021,
"count": 240
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 240
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 240
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.19999999999999996,
"min": 0.19999999999999996,
"max": 0.19999999999999996,
"count": 240
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.19999999999999996,
"min": 0.19999999999999996,
"max": 0.19999999999999996,
"count": 240
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005,
"min": 0.005,
"max": 0.005,
"count": 240
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005,
"min": 0.005,
"max": 0.005,
"count": 240
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1676146148",
"python_version": "3.9.16 (main, Jan 11 2023, 16:05:54) \n[GCC 11.2.0]",
"command_line_arguments": "/home/arne/anaconda3/envs/rl2/bin/mlagents-learn ./config/poca/SoccerTwos.yaml --env=../SoccerTwos/SoccerTwos.x86_64 --run-id=first run --no-graphics",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.11.0+cu113",
"numpy_version": "1.21.2",
"end_time_seconds": "1676149813"
},
"total": 3665.017267064002,
"count": 1,
"self": 0.16701423400445492,
"children": {
"run_training.setup": {
"total": 0.006198107999807689,
"count": 1,
"self": 0.006198107999807689
},
"TrainerController.start_learning": {
"total": 3664.844054721998,
"count": 1,
"self": 2.9737509766346193,
"children": {
"TrainerController._reset_env": {
"total": 2.9451986119820504,
"count": 25,
"self": 2.9451986119820504
},
"TrainerController.advance": {
"total": 3658.5846066943886,
"count": 332558,
"self": 2.940329932840541,
"children": {
"env_step": {
"total": 2300.57553591585,
"count": 332558,
"self": 1756.2113050449225,
"children": {
"SubprocessEnvManager._take_step": {
"total": 542.4074856922198,
"count": 332558,
"self": 19.19779383495552,
"children": {
"TorchPolicy.evaluate": {
"total": 523.2096918572643,
"count": 631250,
"self": 523.2096918572643
}
}
},
"workers": {
"total": 1.9567451787079335,
"count": 332558,
"self": 0.0,
"children": {
"worker_root": {
"total": 3660.5944827921376,
"count": 332558,
"is_parallel": true,
"self": 2238.8638966365324,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0010910209966823459,
"count": 2,
"is_parallel": true,
"self": 0.00025990900030592456,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0008311119963764213,
"count": 8,
"is_parallel": true,
"self": 0.0008311119963764213
}
}
},
"UnityEnvironment.step": {
"total": 0.011647046001598937,
"count": 1,
"is_parallel": true,
"self": 0.0003012590059370268,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.00019559599968488328,
"count": 1,
"is_parallel": true,
"self": 0.00019559599968488328
},
"communicator.exchange": {
"total": 0.010364205998484977,
"count": 1,
"is_parallel": true,
"self": 0.010364205998484977
},
"steps_from_proto": {
"total": 0.0007859849974920508,
"count": 2,
"is_parallel": true,
"self": 0.0001598839917278383,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0006261010057642125,
"count": 8,
"is_parallel": true,
"self": 0.0006261010057642125
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1421.712222462611,
"count": 332557,
"is_parallel": true,
"self": 86.5455151380047,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 44.5132334888076,
"count": 332557,
"is_parallel": true,
"self": 44.5132334888076
},
"communicator.exchange": {
"total": 1060.7840537701777,
"count": 332557,
"is_parallel": true,
"self": 1060.7840537701777
},
"steps_from_proto": {
"total": 229.86942006562094,
"count": 665114,
"is_parallel": true,
"self": 41.2072640777842,
"children": {
"_process_rank_one_or_two_observation": {
"total": 188.66215598783674,
"count": 2660456,
"is_parallel": true,
"self": 188.66215598783674
}
}
}
}
},
"steps_from_proto": {
"total": 0.018363692994171288,
"count": 48,
"is_parallel": true,
"self": 0.0034599000100570265,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.014903792984114261,
"count": 192,
"is_parallel": true,
"self": 0.014903792984114261
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 1355.068740845698,
"count": 332558,
"self": 25.436886592582596,
"children": {
"process_trajectory": {
"total": 258.2796718231184,
"count": 332558,
"self": 254.9649083441218,
"children": {
"RLTrainer._checkpoint": {
"total": 3.3147634789966105,
"count": 10,
"self": 3.3147634789966105
}
}
},
"_update_policy": {
"total": 1071.352182429997,
"count": 240,
"self": 505.0094022534249,
"children": {
"TorchPOCAOptimizer.update": {
"total": 566.342780176572,
"count": 12000,
"self": 566.342780176572
}
}
}
}
}
}
},
"trainer_threads": {
"total": 6.33997842669487e-07,
"count": 1,
"self": 6.33997842669487e-07
},
"TrainerController._save_models": {
"total": 0.3404978049948113,
"count": 1,
"self": 0.0046981169944047,
"children": {
"RLTrainer._checkpoint": {
"total": 0.3357996880004066,
"count": 1,
"self": 0.3357996880004066
}
}
}
}
}
}
}
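
A minimal sketch of how this timers.json could be inspected offline, assuming the file has been saved locally; the run_logs/timers.json path and the summarize helper below are illustrative, not part of the recorded run. It loads the file with Python's standard json module, prints each gauge's value/min/max/count, and breaks the root timer's total wall-clock time down by its direct children.

# Illustrative helper for reading an ML-Agents timers.json like the one above.
# Assumption: the file is available locally at "run_logs/timers.json".
import json

def summarize(path="run_logs/timers.json"):
    with open(path) as f:
        root = json.load(f)

    # Gauges: one entry per recorded statistic, each with value/min/max/count.
    for name, gauge in root["gauges"].items():
        print(f"{name}: value={gauge['value']:.4f} "
              f"(min={gauge['min']:.4f}, max={gauge['max']:.4f}, n={gauge['count']})")

    # Timer tree: the root node itself carries total wall-clock seconds,
    # a call count, self time, and nested children.
    total = root["total"]
    print(f"\ntotal wall-clock: {total:.1f} s")
    for child_name, child in root.get("children", {}).items():
        share = 100.0 * child["total"] / total
        print(f"  {child_name}: {child['total']:.1f} s "
              f"({share:.1f}% of total, count={child['count']})")

if __name__ == "__main__":
    summarize()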