{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.19593621790409088,
"min": 0.18135128915309906,
"max": 1.4679837226867676,
"count": 66
},
"Pyramids.Policy.Entropy.sum": {
"value": 5925.111328125,
"min": 5452.14501953125,
"max": 44532.75390625,
"count": 66
},
"Pyramids.Step.mean": {
"value": 1979961.0,
"min": 29952.0,
"max": 1979961.0,
"count": 66
},
"Pyramids.Step.sum": {
"value": 1979961.0,
"min": 29952.0,
"max": 1979961.0,
"count": 66
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.7173455953598022,
"min": -0.08874265104532242,
"max": 0.8296836018562317,
"count": 66
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 212.33428955078125,
"min": -21.298236846923828,
"max": 253.8831787109375,
"count": 66
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": -0.0003342825220897794,
"min": -0.007599753327667713,
"max": 0.6566954255104065,
"count": 66
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": -0.09894762933254242,
"min": -2.2951254844665527,
"max": 155.63681030273438,
"count": 66
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06857218653432723,
"min": 0.06413555388490923,
"max": 0.07490956852589513,
"count": 66
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 1.0285827980149085,
"min": 0.52379409238949,
"max": 1.0690076710501066,
"count": 66
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.014588604698541995,
"min": 0.0004832045657261958,
"max": 0.01590774849492369,
"count": 66
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.2188290704781299,
"min": 0.005315250222988154,
"max": 0.2373526686084612,
"count": 66
},
"Pyramids.Policy.LearningRate.mean": {
"value": 5.976221575650002e-06,
"min": 5.976221575650002e-06,
"max": 0.0003372520236653715,
"count": 66
},
"Pyramids.Policy.LearningRate.sum": {
"value": 8.964332363475003e-05,
"min": 8.964332363475003e-05,
"max": 0.004223965167657351,
"count": 66
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10175768333333331,
"min": 0.10175768333333331,
"max": 0.19919177142857142,
"count": 66
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.5263652499999996,
"min": 1.3943424,
"max": 2.6513022000000004,
"count": 66
},
"Pyramids.Policy.Beta.mean": {
"value": 0.00018559256500000004,
"min": 0.00018559256500000004,
"max": 0.009919257965714285,
"count": 66
},
"Pyramids.Policy.Beta.sum": {
"value": 0.0027838884750000007,
"min": 0.0027838884750000007,
"max": 0.12425003073500002,
"count": 66
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.006712593138217926,
"min": 0.006492404732853174,
"max": 0.49319568276405334,
"count": 66
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.10068889707326889,
"min": 0.09089366346597672,
"max": 3.4523696899414062,
"count": 66
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 273.69565217391306,
"min": 217.8235294117647,
"max": 999.0,
"count": 66
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 31475.0,
"min": 15984.0,
"max": 33260.0,
"count": 66
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.6741112854169762,
"min": -1.0000000521540642,
"max": 1.7733511343484616,
"count": 66
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 192.52279782295227,
"min": -31.999601677060127,
"max": 240.2503986954689,
"count": 66
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.6741112854169762,
"min": -1.0000000521540642,
"max": 1.7733511343484616,
"count": 66
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 192.52279782295227,
"min": -31.999601677060127,
"max": 240.2503986954689,
"count": 66
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.019038819999727622,
"min": 0.015872363847137402,
"max": 10.959095973521471,
"count": 66
},
"Pyramids.Policy.RndReward.sum": {
"value": 2.1894642999686766,
"min": 1.9536741457995959,
"max": 175.34553557634354,
"count": 66
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 66
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 66
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1702911633",
"python_version": "3.10.12 (main, Nov 20 2023, 15:14:05) [GCC 11.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.1.2+cu121",
"numpy_version": "1.23.5",
"end_time_seconds": "1702916629"
},
"total": 4995.967483816999,
"count": 1,
"self": 0.47641097900032037,
"children": {
"run_training.setup": {
"total": 0.04988553899966064,
"count": 1,
"self": 0.04988553899966064
},
"TrainerController.start_learning": {
"total": 4995.4411872989995,
"count": 1,
"self": 3.1810825380653114,
"children": {
"TrainerController._reset_env": {
"total": 2.224548134000088,
"count": 1,
"self": 2.224548134000088
},
"TrainerController.advance": {
"total": 4989.943705978933,
"count": 129359,
"self": 3.2286951829491954,
"children": {
"env_step": {
"total": 3693.9778236580114,
"count": 129359,
"self": 3413.0291321272466,
"children": {
"SubprocessEnvManager._take_step": {
"total": 278.9382107049337,
"count": 129359,
"self": 10.260352484872328,
"children": {
"TorchPolicy.evaluate": {
"total": 268.67785822006135,
"count": 125060,
"self": 268.67785822006135
}
}
},
"workers": {
"total": 2.0104808258311095,
"count": 129359,
"self": 0.0,
"children": {
"worker_root": {
"total": 4983.87277686697,
"count": 129359,
"is_parallel": true,
"self": 1836.7107776388257,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0018087190001097042,
"count": 1,
"is_parallel": true,
"self": 0.0006253760002437048,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0011833429998659994,
"count": 8,
"is_parallel": true,
"self": 0.0011833429998659994
}
}
},
"UnityEnvironment.step": {
"total": 0.07885554700033026,
"count": 1,
"is_parallel": true,
"self": 0.0005980920004731161,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.00044997999975748826,
"count": 1,
"is_parallel": true,
"self": 0.00044997999975748826
},
"communicator.exchange": {
"total": 0.07611652500008859,
"count": 1,
"is_parallel": true,
"self": 0.07611652500008859
},
"steps_from_proto": {
"total": 0.001690950000011071,
"count": 1,
"is_parallel": true,
"self": 0.0003435629996602074,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0013473870003508637,
"count": 8,
"is_parallel": true,
"self": 0.0013473870003508637
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 3147.161999228144,
"count": 129358,
"is_parallel": true,
"self": 73.09813539426932,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 52.83679982310514,
"count": 129358,
"is_parallel": true,
"self": 52.83679982310514
},
"communicator.exchange": {
"total": 2806.8263952748243,
"count": 129358,
"is_parallel": true,
"self": 2806.8263952748243
},
"steps_from_proto": {
"total": 214.40066873594515,
"count": 129358,
"is_parallel": true,
"self": 44.4393719286586,
"children": {
"_process_rank_one_or_two_observation": {
"total": 169.96129680728654,
"count": 1034864,
"is_parallel": true,
"self": 169.96129680728654
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 1292.7371871379733,
"count": 129359,
"self": 6.1875671259526825,
"children": {
"process_trajectory": {
"total": 274.1138405380243,
"count": 129359,
"self": 273.5613292140242,
"children": {
"RLTrainer._checkpoint": {
"total": 0.5525113240000792,
"count": 4,
"self": 0.5525113240000792
}
}
},
"_update_policy": {
"total": 1012.4357794739963,
"count": 919,
"self": 602.3328376130085,
"children": {
"TorchPPOOptimizer.update": {
"total": 410.1029418609878,
"count": 45600,
"self": 410.1029418609878
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.1299998732283711e-06,
"count": 1,
"self": 1.1299998732283711e-06
},
"TrainerController._save_models": {
"total": 0.09184951800034469,
"count": 1,
"self": 0.0015475990003324114,
"children": {
"RLTrainer._checkpoint": {
"total": 0.09030191900001228,
"count": 1,
"self": 0.09030191900001228
}
}
}
}
}
}
}