{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.2961265742778778,
"min": 0.2961265742778778,
"max": 1.4731642007827759,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 8793.7744140625,
"min": 8793.7744140625,
"max": 44689.91015625,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989873.0,
"min": 29955.0,
"max": 989873.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989873.0,
"min": 29955.0,
"max": 989873.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.48438340425491333,
"min": -0.08469576388597488,
"max": 0.5985384583473206,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 131.75228881835938,
"min": -20.411678314208984,
"max": 168.18930053710938,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.022718539461493492,
"min": -0.014447993598878384,
"max": 0.2907489240169525,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 6.179442882537842,
"min": -4.030990123748779,
"max": 69.1982421875,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06812826525593595,
"min": 0.0650085542359649,
"max": 0.0722695742707801,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9537957135831033,
"min": 0.5585672515730532,
"max": 1.0708752759480074,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.015156093027534163,
"min": 0.001224996735163574,
"max": 0.016277727732293506,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.21218530238547828,
"min": 0.015924957557126463,
"max": 0.22788818825210908,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.736883135357138e-06,
"min": 7.736883135357138e-06,
"max": 0.0002948450642183125,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010831636389499992,
"min": 0.00010831636389499992,
"max": 0.0037593073468976,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10257892857142857,
"min": 0.10257892857142857,
"max": 0.19828168750000003,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.436105,
"min": 1.436105,
"max": 2.6531024000000007,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.0002676349642857141,
"min": 0.0002676349642857141,
"max": 0.009828340581250001,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.0037468894999999973,
"min": 0.0037468894999999973,
"max": 0.12532492976,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.011364920064806938,
"min": 0.01125144399702549,
"max": 0.44066911935806274,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.15910887718200684,
"min": 0.15910887718200684,
"max": 3.525352954864502,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 388.8493150684931,
"min": 316.6111111111111,
"max": 975.4193548387096,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 28386.0,
"min": 17137.0,
"max": 34154.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.450097279170075,
"min": -0.8484788392529343,
"max": 1.6203649753704668,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 107.30719865858555,
"min": -27.999801695346832,
"max": 147.17719861119986,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.450097279170075,
"min": -0.8484788392529343,
"max": 1.6203649753704668,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 107.30719865858555,
"min": -27.999801695346832,
"max": 147.17719861119986,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.04619832187076886,
"min": 0.040724916400116246,
"max": 8.093496054410934,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 3.4186758184368955,
"min": 3.417164008889813,
"max": 145.68292897939682,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1691473850",
"python_version": "3.10.12 (main, Jun 11 2023, 05:26:28) [GCC 11.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.11.0+cu102",
"numpy_version": "1.21.2",
"end_time_seconds": "1691476257"
},
"total": 2407.3795726539993,
"count": 1,
"self": 0.5398008509991996,
"children": {
"run_training.setup": {
"total": 0.04068229500012421,
"count": 1,
"self": 0.04068229500012421
},
"TrainerController.start_learning": {
"total": 2406.799089508,
"count": 1,
"self": 1.4630413230324848,
"children": {
"TrainerController._reset_env": {
"total": 4.6732891059998565,
"count": 1,
"self": 4.6732891059998565
},
"TrainerController.advance": {
"total": 2400.5625640539674,
"count": 63885,
"self": 1.4702606948799257,
"children": {
"env_step": {
"total": 1732.5051737720405,
"count": 63885,
"self": 1614.8484683799868,
"children": {
"SubprocessEnvManager._take_step": {
"total": 116.77754469797537,
"count": 63885,
"self": 5.080930705943501,
"children": {
"TorchPolicy.evaluate": {
"total": 111.69661399203187,
"count": 62568,
"self": 111.69661399203187
}
}
},
"workers": {
"total": 0.8791606940783367,
"count": 63885,
"self": 0.0,
"children": {
"worker_root": {
"total": 2401.1130464359767,
"count": 63885,
"is_parallel": true,
"self": 908.4413508699442,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0019470170000204234,
"count": 1,
"is_parallel": true,
"self": 0.000674806999768407,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0012722100002520165,
"count": 8,
"is_parallel": true,
"self": 0.0012722100002520165
}
}
},
"UnityEnvironment.step": {
"total": 0.10198190000005525,
"count": 1,
"is_parallel": true,
"self": 0.007994180000423512,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0005007559998375655,
"count": 1,
"is_parallel": true,
"self": 0.0005007559998375655
},
"communicator.exchange": {
"total": 0.09118622899995898,
"count": 1,
"is_parallel": true,
"self": 0.09118622899995898
},
"steps_from_proto": {
"total": 0.0023007349998351856,
"count": 1,
"is_parallel": true,
"self": 0.00044504300012704334,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0018556919997081422,
"count": 8,
"is_parallel": true,
"self": 0.0018556919997081422
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1492.6716955660324,
"count": 63884,
"is_parallel": true,
"self": 35.4336236529964,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 25.029161920087063,
"count": 63884,
"is_parallel": true,
"self": 25.029161920087063
},
"communicator.exchange": {
"total": 1316.4239713619997,
"count": 63884,
"is_parallel": true,
"self": 1316.4239713619997
},
"steps_from_proto": {
"total": 115.78493863094923,
"count": 63884,
"is_parallel": true,
"self": 23.28286384199464,
"children": {
"_process_rank_one_or_two_observation": {
"total": 92.50207478895459,
"count": 511072,
"is_parallel": true,
"self": 92.50207478895459
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 666.587129587047,
"count": 63885,
"self": 2.7758071421189925,
"children": {
"process_trajectory": {
"total": 118.23108825292479,
"count": 63885,
"self": 118.02494558692501,
"children": {
"RLTrainer._checkpoint": {
"total": 0.20614266599977782,
"count": 2,
"self": 0.20614266599977782
}
}
},
"_update_policy": {
"total": 545.5802341920032,
"count": 459,
"self": 353.21644556103274,
"children": {
"TorchPPOOptimizer.update": {
"total": 192.36378863097048,
"count": 22761,
"self": 192.36378863097048
}
}
}
}
}
}
},
"trainer_threads": {
"total": 9.22000253922306e-07,
"count": 1,
"self": 9.22000253922306e-07
},
"TrainerController._save_models": {
"total": 0.1001941029999216,
"count": 1,
"self": 0.001436554000065371,
"children": {
"RLTrainer._checkpoint": {
"total": 0.09875754899985623,
"count": 1,
"self": 0.09875754899985623
}
}
}
}
}
}
}