ppo-pyramids / run_logs / timers.json
ewertonfelipe's picture
pyramids
40a6c56
raw
history blame contribute delete
No virus
18.7 kB
{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.5425063967704773,
"min": 0.5270963907241821,
"max": 1.413527488708496,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 16205.751953125,
"min": 15644.220703125,
"max": 42880.76953125,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989958.0,
"min": 29952.0,
"max": 989958.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989958.0,
"min": 29952.0,
"max": 989958.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.3304080367088318,
"min": -0.14370810985565186,
"max": 0.33562496304512024,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 84.58445739746094,
"min": -34.05882263183594,
"max": 86.59123992919922,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.14816218614578247,
"min": -0.22038647532463074,
"max": 0.741780698299408,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 37.92951965332031,
"min": -56.859710693359375,
"max": 175.80203247070312,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06621870913178235,
"min": 0.0648226594926053,
"max": 0.0721828293223705,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9270619278449529,
"min": 0.5006122238202764,
"max": 1.0149849092213472,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.013899843539372089,
"min": 0.0009848378842930207,
"max": 0.019601497236902198,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.19459780955120926,
"min": 0.011818054611516249,
"max": 0.23520615516948357,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.54014748665e-06,
"min": 7.54014748665e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.0001055620648131,
"min": 0.0001055620648131,
"max": 0.0032562257145914994,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10251335000000002,
"min": 0.10251335000000002,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4351869000000002,
"min": 1.3886848,
"max": 2.4006906999999997,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.000261083665,
"min": 0.000261083665,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.00365517131,
"min": 0.00365517131,
"max": 0.10856230915000001,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.011282798834145069,
"min": 0.011282798834145069,
"max": 0.8014407753944397,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.15795917809009552,
"min": 0.15795917809009552,
"max": 5.610085487365723,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 553.6785714285714,
"min": 482.1,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 31006.0,
"min": 15984.0,
"max": 35040.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.1605142558525716,
"min": -1.0000000521540642,
"max": 1.3506950517414047,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 64.98879832774401,
"min": -29.8090016618371,
"max": 82.39239815622568,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.1605142558525716,
"min": -1.0000000521540642,
"max": 1.3506950517414047,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 64.98879832774401,
"min": -29.8090016618371,
"max": 82.39239815622568,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.06510442841174933,
"min": 0.058035823600519384,
"max": 16.753343254327774,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 3.6458479910579626,
"min": 3.3902062239067163,
"max": 268.0534920692444,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1697636455",
"python_version": "3.10.12 (main, Jun 11 2023, 05:26:28) [GCC 11.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.0.1+cu118",
"numpy_version": "1.23.5",
"end_time_seconds": "1697638579"
},
"total": 2123.728369077,
"count": 1,
"self": 0.47568564300036087,
"children": {
"run_training.setup": {
"total": 0.04338381999991725,
"count": 1,
"self": 0.04338381999991725
},
"TrainerController.start_learning": {
"total": 2123.209299614,
"count": 1,
"self": 1.3452846249256254,
"children": {
"TrainerController._reset_env": {
"total": 3.042549364000024,
"count": 1,
"self": 3.042549364000024
},
"TrainerController.advance": {
"total": 2118.7452615240745,
"count": 63435,
"self": 1.3890371830834738,
"children": {
"env_step": {
"total": 1508.586103113992,
"count": 63435,
"self": 1385.400330533904,
"children": {
"SubprocessEnvManager._take_step": {
"total": 122.37893506408227,
"count": 63435,
"self": 4.543668387069829,
"children": {
"TorchPolicy.evaluate": {
"total": 117.83526667701244,
"count": 62572,
"self": 117.83526667701244
}
}
},
"workers": {
"total": 0.8068375160057712,
"count": 63435,
"self": 0.0,
"children": {
"worker_root": {
"total": 2118.6662928169944,
"count": 63435,
"is_parallel": true,
"self": 846.7371872440226,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0017083019999972748,
"count": 1,
"is_parallel": true,
"self": 0.0005239869997240021,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0011843150002732727,
"count": 8,
"is_parallel": true,
"self": 0.0011843150002732727
}
}
},
"UnityEnvironment.step": {
"total": 0.10526572000003398,
"count": 1,
"is_parallel": true,
"self": 0.0005666860001838359,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0004555220000383997,
"count": 1,
"is_parallel": true,
"self": 0.0004555220000383997
},
"communicator.exchange": {
"total": 0.10258392299988373,
"count": 1,
"is_parallel": true,
"self": 0.10258392299988373
},
"steps_from_proto": {
"total": 0.001659588999928019,
"count": 1,
"is_parallel": true,
"self": 0.00040071000034913595,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.001258878999578883,
"count": 8,
"is_parallel": true,
"self": 0.001258878999578883
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1271.9291055729718,
"count": 63434,
"is_parallel": true,
"self": 33.978409333879426,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 23.82013983698107,
"count": 63434,
"is_parallel": true,
"self": 23.82013983698107
},
"communicator.exchange": {
"total": 1118.0067510050155,
"count": 63434,
"is_parallel": true,
"self": 1118.0067510050155
},
"steps_from_proto": {
"total": 96.1238053970958,
"count": 63434,
"is_parallel": true,
"self": 18.905551253132444,
"children": {
"_process_rank_one_or_two_observation": {
"total": 77.21825414396335,
"count": 507472,
"is_parallel": true,
"self": 77.21825414396335
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 608.770121226999,
"count": 63435,
"self": 2.4759001920442643,
"children": {
"process_trajectory": {
"total": 115.64375175396003,
"count": 63435,
"self": 115.46989586395989,
"children": {
"RLTrainer._checkpoint": {
"total": 0.1738558900001408,
"count": 2,
"self": 0.1738558900001408
}
}
},
"_update_policy": {
"total": 490.6504692809947,
"count": 444,
"self": 293.8579451389926,
"children": {
"TorchPPOOptimizer.update": {
"total": 196.7925241420021,
"count": 22803,
"self": 196.7925241420021
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.0839999049494509e-06,
"count": 1,
"self": 1.0839999049494509e-06
},
"TrainerController._save_models": {
"total": 0.07620301699989795,
"count": 1,
"self": 0.001395037999827764,
"children": {
"RLTrainer._checkpoint": {
"total": 0.07480797900007019,
"count": 1,
"self": 0.07480797900007019
}
}
}
}
}
}
}