{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.2911440432071686,
"min": 0.2911440432071686,
"max": 1.462047815322876,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 8664.4462890625,
"min": 8664.4462890625,
"max": 44352.68359375,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989995.0,
"min": 29980.0,
"max": 989995.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989995.0,
"min": 29980.0,
"max": 989995.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.47615519165992737,
"min": -0.11998161673545837,
"max": 0.5694788694381714,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 126.1811294555664,
"min": -28.67560577392578,
"max": 156.606689453125,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.0297621488571167,
"min": -0.008069469593465328,
"max": 0.38934046030044556,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 7.886969566345215,
"min": -2.186826229095459,
"max": 93.0523681640625,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06767994894964158,
"min": 0.06550985406458128,
"max": 0.07343427056820337,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 1.0151992342446237,
"min": 0.587474164545627,
"max": 1.0652697917436906,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.01517806023656424,
"min": 0.0014229360843799252,
"max": 0.01675326295562805,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.22767090354846362,
"min": 0.019921105181318954,
"max": 0.2447463477049799,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.476357507913331e-06,
"min": 7.476357507913331e-06,
"max": 0.00029476455174515,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00011214536261869996,
"min": 0.00011214536261869996,
"max": 0.0037599853466716,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10249208666666668,
"min": 0.10249208666666668,
"max": 0.19825484999999998,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.5373813,
"min": 1.4778100000000003,
"max": 2.6533284000000004,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.000258959458,
"min": 0.000258959458,
"max": 0.009825659514999998,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.0038843918700000005,
"min": 0.0038843918700000005,
"max": 0.12534750715999998,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.013900975696742535,
"min": 0.013893802650272846,
"max": 0.369596004486084,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.20851463079452515,
"min": 0.1945132315158844,
"max": 2.956768035888672,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 378.8051948051948,
"min": 337.0769230769231,
"max": 979.5757575757576,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 29168.0,
"min": 18952.0,
"max": 32975.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.5172778975653958,
"min": -0.8546250502113253,
"max": 1.55523476101782,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 116.83039811253548,
"min": -27.34800160676241,
"max": 143.08159801363945,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.5172778975653958,
"min": -0.8546250502113253,
"max": 1.55523476101782,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 116.83039811253548,
"min": -27.34800160676241,
"max": 143.08159801363945,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.055269279538399795,
"min": 0.05243994093034417,
"max": 5.521331264823675,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 4.255734524456784,
"min": 3.9329955697758123,
"max": 110.4266252964735,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1767580876",
"python_version": "3.10.12 (main, Jul 5 2023, 18:54:27) [GCC 11.2.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=PyramidsTraining --no-graphics",
"mlagents_version": "1.2.0.dev0",
"mlagents_envs_version": "1.2.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.8.0+cu128",
"numpy_version": "1.23.5",
"end_time_seconds": "1767583041"
},
"total": 2164.975924568,
"count": 1,
"self": 0.4806688070002565,
"children": {
"run_training.setup": {
"total": 0.023272053000255255,
"count": 1,
"self": 0.023272053000255255
},
"TrainerController.start_learning": {
"total": 2164.4719837079997,
"count": 1,
"self": 1.2367837740043797,
"children": {
"TrainerController._reset_env": {
"total": 2.0503934470002605,
"count": 1,
"self": 2.0503934470002605
},
"TrainerController.advance": {
"total": 2161.110011202995,
"count": 64096,
"self": 1.2565605120639702,
"children": {
"env_step": {
"total": 1534.988698559905,
"count": 64096,
"self": 1394.3631182469794,
"children": {
"SubprocessEnvManager._take_step": {
"total": 139.88459632396507,
"count": 64096,
"self": 4.405007860925252,
"children": {
"TorchPolicy.evaluate": {
"total": 135.47958846303982,
"count": 62561,
"self": 135.47958846303982
}
}
},
"workers": {
"total": 0.7409839889605792,
"count": 64096,
"self": 0.0,
"children": {
"worker_root": {
"total": 2157.77113635498,
"count": 64096,
"is_parallel": true,
"self": 875.1152142279088,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0020046910003657104,
"count": 1,
"is_parallel": true,
"self": 0.000684017000367021,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0013206739999986894,
"count": 8,
"is_parallel": true,
"self": 0.0013206739999986894
}
}
},
"UnityEnvironment.step": {
"total": 0.05232849399999395,
"count": 1,
"is_parallel": true,
"self": 0.0005599740002253384,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.00046264699994935654,
"count": 1,
"is_parallel": true,
"self": 0.00046264699994935654
},
"communicator.exchange": {
"total": 0.049591359999794804,
"count": 1,
"is_parallel": true,
"self": 0.049591359999794804
},
"steps_from_proto": {
"total": 0.0017145130000244535,
"count": 1,
"is_parallel": true,
"self": 0.0003568800002540229,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0013576329997704306,
"count": 8,
"is_parallel": true,
"self": 0.0013576329997704306
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1282.6559221270713,
"count": 64095,
"is_parallel": true,
"self": 33.263558359869876,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 22.565035537101267,
"count": 64095,
"is_parallel": true,
"self": 22.565035537101267
},
"communicator.exchange": {
"total": 1122.9390805010898,
"count": 64095,
"is_parallel": true,
"self": 1122.9390805010898
},
"steps_from_proto": {
"total": 103.88824772901035,
"count": 64095,
"is_parallel": true,
"self": 21.70233003673411,
"children": {
"_process_rank_one_or_two_observation": {
"total": 82.18591769227623,
"count": 512760,
"is_parallel": true,
"self": 82.18591769227623
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 624.8647521310259,
"count": 64096,
"self": 2.5649058131380116,
"children": {
"process_trajectory": {
"total": 121.34610773987924,
"count": 64096,
"self": 121.17289571287938,
"children": {
"RLTrainer._checkpoint": {
"total": 0.17321202699986316,
"count": 2,
"self": 0.17321202699986316
}
}
},
"_update_policy": {
"total": 500.9537385780086,
"count": 462,
"self": 277.82036602109974,
"children": {
"TorchPPOOptimizer.update": {
"total": 223.13337255690885,
"count": 22716,
"self": 223.13337255690885
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.174999852082692e-06,
"count": 1,
"self": 1.174999852082692e-06
},
"TrainerController._save_models": {
"total": 0.07479410900032235,
"count": 1,
"self": 0.0009782160004760954,
"children": {
"RLTrainer._checkpoint": {
"total": 0.07381589299984626,
"count": 1,
"self": 0.07381589299984626
}
}
}
}
}
}
}