{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.120480477809906,
"min": 0.11592471599578857,
"max": 1.5874193906784058,
"count": 100
},
"Pyramids.Policy.Entropy.sum": {
"value": 3689.59423828125,
"min": 3462.609130859375,
"max": 50797.421875,
"count": 100
},
"Pyramids.Step.mean": {
"value": 2999929.0,
"min": 29560.0,
"max": 2999929.0,
"count": 100
},
"Pyramids.Step.sum": {
"value": 2999929.0,
"min": 29560.0,
"max": 2999929.0,
"count": 100
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 1.0336419343948364,
"min": -0.08844733983278275,
"max": 1.1377360820770264,
"count": 100
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 138.50802612304688,
"min": -5.483735084533691,
"max": 171.79815673828125,
"count": 100
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.17655682563781738,
"min": 0.17141778767108917,
"max": 8.814286231994629,
"count": 100
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 23.658615112304688,
"min": 23.658615112304688,
"max": 528.857177734375,
"count": 100
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.0678099990593084,
"min": 0.0652631725372136,
"max": 0.07341942068417125,
"count": 100
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9493399868303176,
"min": 0.22025826205251373,
"max": 1.0184781826032043,
"count": 100
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.010579818666129672,
"min": 0.0075454162782335596,
"max": 6.112139113759622,
"count": 100
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.1481174613258154,
"min": 0.09809041161703627,
"max": 18.336417341278867,
"count": 100
},
"Pyramids.Policy.LearningRate.mean": {
"value": 5.086923485500009e-07,
"min": 5.086923485500009e-07,
"max": 9.946240053760001e-05,
"count": 100
},
"Pyramids.Policy.LearningRate.sum": {
"value": 7.1216928797000135e-06,
"min": 7.1216928797000135e-06,
"max": 0.0012949191384143,
"count": 100
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10025429642857142,
"min": 0.10025429642857142,
"max": 0.1497312,
"count": 100
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.40356015,
"min": 0.4491936,
"max": 2.0474595166666667,
"count": 100
},
"Pyramids.Policy.Beta.mean": {
"value": 3.537878357142863e-05,
"min": 3.537878357142863e-05,
"max": 0.0049731737600000004,
"count": 100
},
"Pyramids.Policy.Beta.sum": {
"value": 0.0004953029700000008,
"min": 0.0004953029700000008,
"max": 0.06475645976333333,
"count": 100
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.029077773913741112,
"min": 0.02837519161403179,
"max": 2.857499837875366,
"count": 100
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.40708884596824646,
"min": 0.3890349268913269,
"max": 8.57249927520752,
"count": 100
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 233.0873015873016,
"min": 202.46938775510205,
"max": 999.0,
"count": 100
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 29369.0,
"min": 19021.0,
"max": 32365.0,
"count": 100
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.7503174311584897,
"min": -1.0000000596046448,
"max": 1.7970205235154662,
"count": 100
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 220.5399963259697,
"min": -28.000001668930054,
"max": 267.3429970741272,
"count": 100
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.7503174311584897,
"min": -1.0000000596046448,
"max": 1.7970205235154662,
"count": 100
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 220.5399963259697,
"min": -28.000001668930054,
"max": 267.3429970741272,
"count": 100
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.3582887586265329,
"min": 0.3408968039367297,
"max": 247.367515846535,
"count": 100
},
"Pyramids.Policy.RndReward.sum": {
"value": 45.14438358694315,
"min": 45.14438358694315,
"max": 6678.922927856445,
"count": 100
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 100
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 100
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1726639607",
"python_version": "3.10.12 (main, Jul 29 2024, 16:56:48) [GCC 11.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn /content/ml-agents/config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.4.0+cu121",
"numpy_version": "1.23.5",
"end_time_seconds": "1726648416"
},
"total": 8809.083367394001,
"count": 1,
"self": 0.5358093630002259,
"children": {
"run_training.setup": {
"total": 0.0773793860000751,
"count": 1,
"self": 0.0773793860000751
},
"TrainerController.start_learning": {
"total": 8808.470178645,
"count": 1,
"self": 4.131340977975924,
"children": {
"TrainerController._reset_env": {
"total": 3.3101524130000826,
"count": 1,
"self": 3.3101524130000826
},
"TrainerController.advance": {
"total": 8800.919145981024,
"count": 196283,
"self": 4.090447587641393,
"children": {
"env_step": {
"total": 5655.40742349231,
"count": 196283,
"self": 5116.8528913124655,
"children": {
"SubprocessEnvManager._take_step": {
"total": 536.0457191310076,
"count": 196283,
"self": 13.932800421995125,
"children": {
"TorchPolicy.evaluate": {
"total": 522.1129187090124,
"count": 187652,
"self": 522.1129187090124
}
}
},
"workers": {
"total": 2.508813048836714,
"count": 196283,
"self": 0.0,
"children": {
"worker_root": {
"total": 8792.475132494816,
"count": 196283,
"is_parallel": true,
"self": 4046.625151891779,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0024285349999217942,
"count": 1,
"is_parallel": true,
"self": 0.0006705189997546768,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0017580160001671175,
"count": 8,
"is_parallel": true,
"self": 0.0017580160001671175
}
}
},
"UnityEnvironment.step": {
"total": 0.048741700000050514,
"count": 1,
"is_parallel": true,
"self": 0.0007070269999758239,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.00044839600002433144,
"count": 1,
"is_parallel": true,
"self": 0.00044839600002433144
},
"communicator.exchange": {
"total": 0.04587008099997547,
"count": 1,
"is_parallel": true,
"self": 0.04587008099997547
},
"steps_from_proto": {
"total": 0.0017161960000748877,
"count": 1,
"is_parallel": true,
"self": 0.0003532489999997779,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0013629470000751098,
"count": 8,
"is_parallel": true,
"self": 0.0013629470000751098
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 4745.849980603037,
"count": 196282,
"is_parallel": true,
"self": 100.62413507591828,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 68.55574473311856,
"count": 196282,
"is_parallel": true,
"self": 68.55574473311856
},
"communicator.exchange": {
"total": 4277.860185039084,
"count": 196282,
"is_parallel": true,
"self": 4277.860185039084
},
"steps_from_proto": {
"total": 298.8099157549159,
"count": 196282,
"is_parallel": true,
"self": 61.160367077778915,
"children": {
"_process_rank_one_or_two_observation": {
"total": 237.64954867713698,
"count": 1570256,
"is_parallel": true,
"self": 237.64954867713698
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 3141.421274901074,
"count": 196283,
"self": 9.627312215257007,
"children": {
"process_trajectory": {
"total": 365.83483695880545,
"count": 196283,
"self": 364.8032592338052,
"children": {
"RLTrainer._checkpoint": {
"total": 1.0315777250002611,
"count": 6,
"self": 1.0315777250002611
}
}
},
"_update_policy": {
"total": 2765.9591257270113,
"count": 1315,
"self": 1495.0012805201413,
"children": {
"TorchPPOOptimizer.update": {
"total": 1270.95784520687,
"count": 114145,
"self": 1270.95784520687
}
}
}
}
}
}
},
"trainer_threads": {
"total": 8.8600063463673e-07,
"count": 1,
"self": 8.8600063463673e-07
},
"TrainerController._save_models": {
"total": 0.1095383869997022,
"count": 1,
"self": 0.0015986430007615127,
"children": {
"RLTrainer._checkpoint": {
"total": 0.10793974399894068,
"count": 1,
"self": 0.10793974399894068
}
}
}
}
}
}
}