{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.4969807267189026,
"min": 0.42631274461746216,
"max": 0.542986273765564,
"count": 7
},
"Pyramids.Policy.Entropy.sum": {
"value": 14694.7265625,
"min": 873.0885009765625,
"max": 16254.837890625,
"count": 7
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 7
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 7
},
"Pyramids.Step.mean": {
"value": 989971.0,
"min": 839959.0,
"max": 989971.0,
"count": 6
},
"Pyramids.Step.sum": {
"value": 989971.0,
"min": 839959.0,
"max": 989971.0,
"count": 6
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.25633999705314636,
"min": 0.0464077964425087,
"max": 0.25633999705314636,
"count": 6
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 65.36669921875,
"min": 11.509133338928223,
"max": 65.36669921875,
"count": 6
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.007073692977428436,
"min": 0.007073692977428436,
"max": 0.015582084655761719,
"count": 6
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 1.8037917613983154,
"min": 1.8037917613983154,
"max": 3.879939079284668,
"count": 6
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06593973143865924,
"min": 0.06584821953645563,
"max": 0.0691815057926921,
"count": 6
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9231562401412292,
"min": 0.7901786344374676,
"max": 1.0377225868903814,
"count": 6
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.010510177725807996,
"min": 0.006338277656357689,
"max": 0.010510177725807996,
"count": 6
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.14714248816131195,
"min": 0.08873588718900766,
"max": 0.14714248816131195,
"count": 6
},
"Pyramids.Policy.LearningRate.mean": {
"value": 1.2788740299414286e-05,
"min": 1.2788740299414286e-05,
"max": 8.7043207591375e-05,
"count": 6
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.0001790423641918,
"min": 0.0001790423641918,
"max": 0.0010873237825354998,
"count": 6
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10255772857142857,
"min": 0.10255772857142857,
"max": 0.117408625,
"count": 6
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4358082,
"min": 1.4089035,
"max": 1.7174645000000002,
"count": 6
},
"Pyramids.Policy.Beta.mean": {
"value": 0.00026551708428571437,
"min": 0.00026551708428571437,
"max": 0.0017491216375000007,
"count": 6
},
"Pyramids.Policy.Beta.sum": {
"value": 0.003717239180000001,
"min": 0.003717239180000001,
"max": 0.021874703550000003,
"count": 6
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.011271917261183262,
"min": 0.01120806485414505,
"max": 0.01225132867693901,
"count": 6
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.15780684351921082,
"min": 0.14701594412326813,
"max": 0.18066348135471344,
"count": 6
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 635.1132075471698,
"min": 588.7083333333334,
"max": 773.952380952381,
"count": 6
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 33661.0,
"min": 22501.0,
"max": 33661.0,
"count": 6
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 0.8746716693606017,
"min": 0.32096186422166373,
"max": 0.9086637983613826,
"count": 6
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 46.35759847611189,
"min": 13.480398297309875,
"max": 46.35759847611189,
"count": 6
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 0.8746716693606017,
"min": 0.32096186422166373,
"max": 0.9086637983613826,
"count": 6
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 46.35759847611189,
"min": 13.480398297309875,
"max": 46.35759847611189,
"count": 6
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.07392889281892974,
"min": 0.06827661918167847,
"max": 0.09641892619575151,
"count": 6
},
"Pyramids.Policy.RndReward.sum": {
"value": 3.918231319403276,
"min": 2.90527749795001,
"max": 4.049594900221564,
"count": 6
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1706806088",
"python_version": "3.10.12 (main, Nov 20 2023, 15:14:05) [GCC 11.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics --resume",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.2.0+cu121",
"numpy_version": "1.23.5",
"end_time_seconds": "1706806501"
},
"total": 412.98999239900013,
"count": 1,
"self": 0.5277990220001811,
"children": {
"run_training.setup": {
"total": 0.045968931000061275,
"count": 1,
"self": 0.045968931000061275
},
"TrainerController.start_learning": {
"total": 412.4162244459999,
"count": 1,
"self": 0.25361718202157135,
"children": {
"TrainerController._reset_env": {
"total": 2.2507146850002755,
"count": 1,
"self": 2.2507146850002755
},
"TrainerController.advance": {
"total": 409.8228079509777,
"count": 12149,
"self": 0.26849173005894045,
"children": {
"env_step": {
"total": 288.91952816193316,
"count": 12149,
"self": 264.12411645289967,
"children": {
"SubprocessEnvManager._take_step": {
"total": 24.64340950700489,
"count": 12149,
"self": 0.8946365079896168,
"children": {
"TorchPolicy.evaluate": {
"total": 23.748772999015273,
"count": 11939,
"self": 23.748772999015273
}
}
},
"workers": {
"total": 0.15200220202859782,
"count": 12149,
"self": 0.0,
"children": {
"worker_root": {
"total": 411.31628305100185,
"count": 12149,
"is_parallel": true,
"self": 168.8024168019865,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.001999697999963246,
"count": 1,
"is_parallel": true,
"self": 0.0006718100003126892,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0013278879996505566,
"count": 8,
"is_parallel": true,
"self": 0.0013278879996505566
}
}
},
"UnityEnvironment.step": {
"total": 0.057077552000009746,
"count": 1,
"is_parallel": true,
"self": 0.0006223160003173689,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0005332819996510807,
"count": 1,
"is_parallel": true,
"self": 0.0005332819996510807
},
"communicator.exchange": {
"total": 0.05407051500014859,
"count": 1,
"is_parallel": true,
"self": 0.05407051500014859
},
"steps_from_proto": {
"total": 0.0018514389998927072,
"count": 1,
"is_parallel": true,
"self": 0.0003821119989879662,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.001469327000904741,
"count": 8,
"is_parallel": true,
"self": 0.001469327000904741
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 242.51386624901534,
"count": 12148,
"is_parallel": true,
"self": 6.68029840002373,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 4.86561667698561,
"count": 12148,
"is_parallel": true,
"self": 4.86561667698561
},
"communicator.exchange": {
"total": 211.74738936802714,
"count": 12148,
"is_parallel": true,
"self": 211.74738936802714
},
"steps_from_proto": {
"total": 19.220561803978853,
"count": 12148,
"is_parallel": true,
"self": 3.8417280820990527,
"children": {
"_process_rank_one_or_two_observation": {
"total": 15.3788337218798,
"count": 97184,
"is_parallel": true,
"self": 15.3788337218798
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 120.63478805898558,
"count": 12149,
"self": 0.5097322969668312,
"children": {
"process_trajectory": {
"total": 24.777226010020513,
"count": 12149,
"self": 24.661879365020468,
"children": {
"RLTrainer._checkpoint": {
"total": 0.1153466450000451,
"count": 1,
"self": 0.1153466450000451
}
}
},
"_update_policy": {
"total": 95.34782975199823,
"count": 88,
"self": 55.45195364298252,
"children": {
"TorchPPOOptimizer.update": {
"total": 39.89587610901572,
"count": 4308,
"self": 39.89587610901572
}
}
}
}
}
}
},
"trainer_threads": {
"total": 9.22000253922306e-07,
"count": 1,
"self": 9.22000253922306e-07
},
"TrainerController._save_models": {
"total": 0.0890837060001104,
"count": 1,
"self": 0.0023088230000212207,
"children": {
"RLTrainer._checkpoint": {
"total": 0.08677488300008918,
"count": 1,
"self": 0.08677488300008918
}
}
}
}
}
}
}