{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.4806901514530182,
"min": 0.46457338333129883,
"max": 1.509286642074585,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 14305.3388671875,
"min": 13989.2333984375,
"max": 45785.71875,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989986.0,
"min": 29952.0,
"max": 989986.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989986.0,
"min": 29952.0,
"max": 989986.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.46322131156921387,
"min": -0.10201238095760345,
"max": 0.5155754685401917,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 126.92263793945312,
"min": -24.48297119140625,
"max": 140.2365264892578,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": -0.04039676859974861,
"min": -0.04039676859974861,
"max": 0.3379466235637665,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": -11.068714141845703,
"min": -11.068714141845703,
"max": 80.09335327148438,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.07026558325938027,
"min": 0.06552534022972417,
"max": 0.07484890726426109,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9837181656313237,
"min": 0.5239423508498277,
"max": 1.08966584711258,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.015307872783844632,
"min": 0.00011374262652312138,
"max": 0.015307872783844632,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.21431021897382485,
"min": 0.0015923967713236994,
"max": 0.21431021897382485,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.467890367878568e-06,
"min": 7.467890367878568e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010455046515029996,
"min": 0.00010455046515029996,
"max": 0.0035069090310304,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10248926428571428,
"min": 0.10248926428571428,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4348497,
"min": 1.3886848,
"max": 2.5689696,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.00025867750214285704,
"min": 0.00025867750214285704,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.003621485029999999,
"min": 0.003621485029999999,
"max": 0.11692006304000001,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.011027449741959572,
"min": 0.011027449741959572,
"max": 0.3329103887081146,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.1543843001127243,
"min": 0.1543843001127243,
"max": 2.3303728103637695,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 360.4634146341463,
"min": 360.4634146341463,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 29558.0,
"min": 15984.0,
"max": 32657.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.5663438861871637,
"min": -1.0000000521540642,
"max": 1.5663438861871637,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 128.44019866734743,
"min": -31.998001664876938,
"max": 128.44019866734743,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.5663438861871637,
"min": -1.0000000521540642,
"max": 1.5663438861871637,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 128.44019866734743,
"min": -31.998001664876938,
"max": 128.44019866734743,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.04146373877301812,
"min": 0.04146373877301812,
"max": 6.50433171633631,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 3.400026579387486,
"min": 3.242724022711627,
"max": 104.06930746138096,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1690391517",
"python_version": "3.10.6 (main, May 29 2023, 11:10:38) [GCC 11.3.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.11.0+cu102",
"numpy_version": "1.21.2",
"end_time_seconds": "1690393612"
},
"total": 2095.1060870580004,
"count": 1,
"self": 0.48681907499985755,
"children": {
"run_training.setup": {
"total": 0.031005214000288106,
"count": 1,
"self": 0.031005214000288106
},
"TrainerController.start_learning": {
"total": 2094.5882627690003,
"count": 1,
"self": 1.32669109706967,
"children": {
"TrainerController._reset_env": {
"total": 4.0577858370002104,
"count": 1,
"self": 4.0577858370002104
},
"TrainerController.advance": {
"total": 2089.1086285639294,
"count": 63463,
"self": 1.3698474590592014,
"children": {
"env_step": {
"total": 1438.4763797010228,
"count": 63463,
"self": 1331.8593100960966,
"children": {
"SubprocessEnvManager._take_step": {
"total": 105.79417967197196,
"count": 63463,
"self": 4.595622309030205,
"children": {
"TorchPolicy.evaluate": {
"total": 101.19855736294176,
"count": 62555,
"self": 101.19855736294176
}
}
},
"workers": {
"total": 0.8228899329542401,
"count": 63463,
"self": 0.0,
"children": {
"worker_root": {
"total": 2089.7320063191414,
"count": 63463,
"is_parallel": true,
"self": 868.0056213691682,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0017073590001928096,
"count": 1,
"is_parallel": true,
"self": 0.000550068000848114,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0011572909993446956,
"count": 8,
"is_parallel": true,
"self": 0.0011572909993446956
}
}
},
"UnityEnvironment.step": {
"total": 0.04772115900004792,
"count": 1,
"is_parallel": true,
"self": 0.0005641980005748337,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0004791829996975139,
"count": 1,
"is_parallel": true,
"self": 0.0004791829996975139
},
"communicator.exchange": {
"total": 0.04492968300019129,
"count": 1,
"is_parallel": true,
"self": 0.04492968300019129
},
"steps_from_proto": {
"total": 0.001748094999584282,
"count": 1,
"is_parallel": true,
"self": 0.0003509819989631069,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0013971130006211752,
"count": 8,
"is_parallel": true,
"self": 0.0013971130006211752
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1221.7263849499732,
"count": 63462,
"is_parallel": true,
"self": 33.59547664998672,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 22.947498263910347,
"count": 63462,
"is_parallel": true,
"self": 22.947498263910347
},
"communicator.exchange": {
"total": 1066.7790750650006,
"count": 63462,
"is_parallel": true,
"self": 1066.7790750650006
},
"steps_from_proto": {
"total": 98.40433497107551,
"count": 63462,
"is_parallel": true,
"self": 19.753276786021615,
"children": {
"_process_rank_one_or_two_observation": {
"total": 78.6510581850539,
"count": 507696,
"is_parallel": true,
"self": 78.6510581850539
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 649.2624014038474,
"count": 63463,
"self": 2.343993215905357,
"children": {
"process_trajectory": {
"total": 105.70728281793299,
"count": 63463,
"self": 105.50124141793367,
"children": {
"RLTrainer._checkpoint": {
"total": 0.20604139999932158,
"count": 2,
"self": 0.20604139999932158
}
}
},
"_update_policy": {
"total": 541.2111253700091,
"count": 444,
"self": 353.88759886308753,
"children": {
"TorchPPOOptimizer.update": {
"total": 187.32352650692155,
"count": 22815,
"self": 187.32352650692155
}
}
}
}
}
}
},
"trainer_threads": {
"total": 8.140004865708761e-07,
"count": 1,
"self": 8.140004865708761e-07
},
"TrainerController._save_models": {
"total": 0.0951564570004848,
"count": 1,
"self": 0.0013648860003740992,
"children": {
"RLTrainer._checkpoint": {
"total": 0.09379157100011071,
"count": 1,
"self": 0.09379157100011071
}
}
}
}
}
}
}