ppo-PyramidsRND / run_logs / timers.json
{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.48494869470596313,
"min": 0.4458564817905426,
"max": 1.4305806159973145,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 14494.146484375,
"min": 13582.572265625,
"max": 43398.09375,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989909.0,
"min": 29952.0,
"max": 989909.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989909.0,
"min": 29952.0,
"max": 989909.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.4544605016708374,
"min": -0.09257080405950546,
"max": 0.4753778874874115,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 122.2498779296875,
"min": -22.21699333190918,
"max": 127.40127563476562,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.01365196518599987,
"min": -0.05046461150050163,
"max": 0.5272053480148315,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 3.6723785400390625,
"min": -12.666617393493652,
"max": 124.94766998291016,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06907490241267596,
"min": 0.06545852637549142,
"max": 0.07361949516972796,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9670486337774633,
"min": 0.5005067759424303,
"max": 1.0682951432293823,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.015097734185632063,
"min": 0.0003599718032271872,
"max": 0.01654348267904771,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.21136827859884888,
"min": 0.0025198026225903103,
"max": 0.2316087575066679,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.622990316178575e-06,
"min": 7.622990316178575e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010672186442650005,
"min": 0.00010672186442650005,
"max": 0.0029091435302856005,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10254096428571428,
"min": 0.10254096428571428,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4355735,
"min": 1.327104,
"max": 2.3597314000000003,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.00026384233214285724,
"min": 0.00026384233214285724,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.0036937926500000017,
"min": 0.0036937926500000017,
"max": 0.09700446856,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.011389526538550854,
"min": 0.011389526538550854,
"max": 0.48789069056510925,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.1594533771276474,
"min": 0.1594533771276474,
"max": 3.4152348041534424,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 449.8550724637681,
"min": 397.6764705882353,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 31040.0,
"min": 15984.0,
"max": 32843.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.4921622930661491,
"min": -1.0000000521540642,
"max": 1.5434852777158512,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 102.9591982215643,
"min": -32.000001668930054,
"max": 112.33799807727337,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.4921622930661491,
"min": -1.0000000521540642,
"max": 1.5434852777158512,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 102.9591982215643,
"min": -32.000001668930054,
"max": 112.33799807727337,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.052687975265882044,
"min": 0.04797114216746516,
"max": 10.168355417437851,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 3.635470293345861,
"min": 3.262037667387631,
"max": 162.69368667900562,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1727817054",
"python_version": "3.10.12 (main, Sep 11 2024, 15:47:36) [GCC 11.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.4.1+cu121",
"numpy_version": "1.23.5",
"end_time_seconds": "1727820396"
},
"total": 3342.0822148019997,
"count": 1,
"self": 0.6559751639997558,
"children": {
"run_training.setup": {
"total": 0.08156471999996029,
"count": 1,
"self": 0.08156471999996029
},
"TrainerController.start_learning": {
"total": 3341.3446749180002,
"count": 1,
"self": 2.3904796399083352,
"children": {
"TrainerController._reset_env": {
"total": 3.720933862000038,
"count": 1,
"self": 3.720933862000038
},
"TrainerController.advance": {
"total": 3335.1375806090923,
"count": 63529,
"self": 2.5486039721531597,
"children": {
"env_step": {
"total": 2185.466810155028,
"count": 63529,
"self": 2013.308944794122,
"children": {
"SubprocessEnvManager._take_step": {
"total": 170.6680165910018,
"count": 63529,
"self": 7.257320027049673,
"children": {
"TorchPolicy.evaluate": {
"total": 163.41069656395211,
"count": 62570,
"self": 163.41069656395211
}
}
},
"workers": {
"total": 1.4898487699040288,
"count": 63529,
"self": 0.0,
"children": {
"worker_root": {
"total": 3333.8334394340523,
"count": 63529,
"is_parallel": true,
"self": 1510.5286797009514,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.006073121000099491,
"count": 1,
"is_parallel": true,
"self": 0.0018826979996902082,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.004190423000409282,
"count": 8,
"is_parallel": true,
"self": 0.004190423000409282
}
}
},
"UnityEnvironment.step": {
"total": 0.2105785559999731,
"count": 1,
"is_parallel": true,
"self": 0.0008339419998719677,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0005338329999631242,
"count": 1,
"is_parallel": true,
"self": 0.0005338329999631242
},
"communicator.exchange": {
"total": 0.2012841020000451,
"count": 1,
"is_parallel": true,
"self": 0.2012841020000451
},
"steps_from_proto": {
"total": 0.007926679000092918,
"count": 1,
"is_parallel": true,
"self": 0.001978233999807344,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.005948445000285574,
"count": 8,
"is_parallel": true,
"self": 0.005948445000285574
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1823.3047597331008,
"count": 63528,
"is_parallel": true,
"self": 50.064376761043604,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 30.727613304077977,
"count": 63528,
"is_parallel": true,
"self": 30.727613304077977
},
"communicator.exchange": {
"total": 1612.4939331609442,
"count": 63528,
"is_parallel": true,
"self": 1612.4939331609442
},
"steps_from_proto": {
"total": 130.01883650703508,
"count": 63528,
"is_parallel": true,
"self": 28.396681895394295,
"children": {
"_process_rank_one_or_two_observation": {
"total": 101.62215461164078,
"count": 508224,
"is_parallel": true,
"self": 101.62215461164078
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 1147.1221664819116,
"count": 63529,
"self": 4.833881795924071,
"children": {
"process_trajectory": {
"total": 172.4370689779828,
"count": 63529,
"self": 172.18156362698278,
"children": {
"RLTrainer._checkpoint": {
"total": 0.2555053510000107,
"count": 2,
"self": 0.2555053510000107
}
}
},
"_update_policy": {
"total": 969.8512157080047,
"count": 428,
"self": 388.923307605055,
"children": {
"TorchPPOOptimizer.update": {
"total": 580.9279081029497,
"count": 22845,
"self": 580.9279081029497
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.1579995771171525e-06,
"count": 1,
"self": 1.1579995771171525e-06
},
"TrainerController._save_models": {
"total": 0.09567964899997605,
"count": 1,
"self": 0.002464440000039758,
"children": {
"RLTrainer._checkpoint": {
"total": 0.0932152089999363,
"count": 1,
"self": 0.0932152089999363
}
}
}
}
}
}
}
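
For reference, below is a minimal sketch of how a timer tree like the one above can be inspected offline. It assumes the file is saved locally as run_logs/timers.json; the path and the walk helper are illustrative only and are not part of the ML-Agents API.

import json

# Load the timer tree written by mlagents-learn (file path is an assumption).
with open("run_logs/timers.json") as f:
    root = json.load(f)

def walk(node, name="root", depth=0):
    # Each timer node carries a cumulative wall-clock total, its own
    # (exclusive) time, a call count, and nested child timers.
    indent = "  " * depth
    print(f"{indent}{name}: total={node.get('total', 0.0):.2f}s "
          f"self={node.get('self', 0.0):.2f}s count={node.get('count', 0)}")
    for child_name, child in node.get("children", {}).items():
        walk(child, child_name, depth + 1)

walk(root)

Walking this particular log shows that most of the run's 3342 s is spent in env_step, dominated by communicator.exchange (the Unity-side simulation), with the remainder in trainer_advance (trajectory processing and the PPO update).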