ppo-Pyramids/run_logs/timers.json
{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.5681068301200867,
"min": 0.5681068301200867,
"max": 1.4420626163482666,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 17034.115234375,
"min": 17034.115234375,
"max": 43746.41015625,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989986.0,
"min": 29947.0,
"max": 989986.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989986.0,
"min": 29947.0,
"max": 989986.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.3412111699581146,
"min": -0.09827432781457901,
"max": 0.40206193923950195,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 89.39732360839844,
"min": -23.684112548828125,
"max": 108.95878601074219,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": -0.007717915344983339,
"min": -0.027382386848330498,
"max": 0.3422132730484009,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": -2.0220937728881836,
"min": -7.14680290222168,
"max": 81.10454559326172,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06644598210049378,
"min": 0.06520937134479811,
"max": 0.0723510230574077,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9966897315074068,
"min": 0.5698150683863924,
"max": 1.0742209238718108,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.01164217336158294,
"min": 0.0009489312674346981,
"max": 0.014105456371561603,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.1746326004237441,
"min": 0.00948931267434698,
"max": 0.19747638920186245,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.546837484420003e-06,
"min": 7.546837484420003e-06,
"max": 0.0002950140391619875,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00011320256226630005,
"min": 0.00011320256226630005,
"max": 0.0035088938303688005,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10251558000000002,
"min": 0.10251558000000002,
"max": 0.1983380125,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.5377337000000004,
"min": 1.4781556000000002,
"max": 2.5696312000000003,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.0002613064420000001,
"min": 0.0002613064420000001,
"max": 0.00983396744875,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.003919596630000002,
"min": 0.003919596630000002,
"max": 0.11698615688,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.008664456196129322,
"min": 0.008664456196129322,
"max": 0.3869248628616333,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.1299668401479721,
"min": 0.12511391937732697,
"max": 3.0953989028930664,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 524.6031746031746,
"min": 449.4923076923077,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 33050.0,
"min": 16874.0,
"max": 33413.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.0943460051739027,
"min": -0.9999871489501768,
"max": 1.4581722854421688,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 68.94379832595587,
"min": -30.999601617455482,
"max": 94.78119855374098,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.0943460051739027,
"min": -0.9999871489501768,
"max": 1.4581722854421688,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 68.94379832595587,
"min": -30.999601617455482,
"max": 94.78119855374098,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.04680416505901135,
"min": 0.041866600723453586,
"max": 8.089154235580388,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 2.9486623987177154,
"min": 2.721329047024483,
"max": 137.5156220048666,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1689113490",
"python_version": "3.10.12 (main, Jun 7 2023, 12:45:35) [GCC 9.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.11.0+cu102",
"numpy_version": "1.21.2",
"end_time_seconds": "1689115692"
},
"total": 2202.127658621,
"count": 1,
"self": 0.8861614200009171,
"children": {
"run_training.setup": {
"total": 0.039820418999852336,
"count": 1,
"self": 0.039820418999852336
},
"TrainerController.start_learning": {
"total": 2201.2016767819996,
"count": 1,
"self": 1.383927276999657,
"children": {
"TrainerController._reset_env": {
"total": 4.164244673999974,
"count": 1,
"self": 4.164244673999974
},
"TrainerController.advance": {
"total": 2195.5025151899995,
"count": 63550,
"self": 1.3912653279780898,
"children": {
"env_step": {
"total": 1528.205500737079,
"count": 63550,
"self": 1414.8180075501496,
"children": {
"SubprocessEnvManager._take_step": {
"total": 112.60184838396503,
"count": 63550,
"self": 4.726478841980224,
"children": {
"TorchPolicy.evaluate": {
"total": 107.87536954198481,
"count": 62562,
"self": 107.87536954198481
}
}
},
"workers": {
"total": 0.785644802964498,
"count": 63550,
"self": 0.0,
"children": {
"worker_root": {
"total": 2195.7268842569997,
"count": 63550,
"is_parallel": true,
"self": 896.5040692079797,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0019162609999057167,
"count": 1,
"is_parallel": true,
"self": 0.0005770889997620543,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0013391720001436624,
"count": 8,
"is_parallel": true,
"self": 0.0013391720001436624
}
}
},
"UnityEnvironment.step": {
"total": 0.04831611399981739,
"count": 1,
"is_parallel": true,
"self": 0.0005843779999850085,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0004982640000434913,
"count": 1,
"is_parallel": true,
"self": 0.0004982640000434913
},
"communicator.exchange": {
"total": 0.04512135400000261,
"count": 1,
"is_parallel": true,
"self": 0.04512135400000261
},
"steps_from_proto": {
"total": 0.0021121179997862782,
"count": 1,
"is_parallel": true,
"self": 0.00038708499982931244,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0017250329999569658,
"count": 8,
"is_parallel": true,
"self": 0.0017250329999569658
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1299.22281504902,
"count": 63549,
"is_parallel": true,
"self": 33.30049478098431,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 22.271404307068224,
"count": 63549,
"is_parallel": true,
"self": 22.271404307068224
},
"communicator.exchange": {
"total": 1140.0931859650211,
"count": 63549,
"is_parallel": true,
"self": 1140.0931859650211
},
"steps_from_proto": {
"total": 103.55772999594637,
"count": 63549,
"is_parallel": true,
"self": 19.886542000095687,
"children": {
"_process_rank_one_or_two_observation": {
"total": 83.67118799585069,
"count": 508392,
"is_parallel": true,
"self": 83.67118799585069
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 665.9057491249423,
"count": 63550,
"self": 2.7218466049596373,
"children": {
"process_trajectory": {
"total": 112.4122037049865,
"count": 63550,
"self": 112.14047624998648,
"children": {
"RLTrainer._checkpoint": {
"total": 0.2717274550000184,
"count": 2,
"self": 0.2717274550000184
}
}
},
"_update_policy": {
"total": 550.7716988149962,
"count": 455,
"self": 355.4554813659827,
"children": {
"TorchPPOOptimizer.update": {
"total": 195.3162174490135,
"count": 22797,
"self": 195.3162174490135
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.2300001799303573e-06,
"count": 1,
"self": 1.2300001799303573e-06
},
"TrainerController._save_models": {
"total": 0.1509884110000712,
"count": 1,
"self": 0.0020852699999522883,
"children": {
"RLTrainer._checkpoint": {
"total": 0.14890314100011892,
"count": 1,
"self": 0.14890314100011892
}
}
}
}
}
}
}
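
For reference, a minimal sketch of how the dump above could be loaded and summarized. It assumes the file is saved at run_logs/timers.json (the path shown at the top of this page); the "gauges" and "total" keys and the per-gauge value/min/max/count fields are taken directly from the JSON itself, while everything else is illustrative.

import json

# Load the ML-Agents timer dump (path assumed from the repo layout above).
with open("run_logs/timers.json") as f:
    timers = json.load(f)

# Each gauge stores the most recent value plus the min/max observed across
# `count` summary writes during training.
for name, gauge in sorted(timers["gauges"].items()):
    print(f"{name}: last={gauge['value']:.4g}  "
          f"min={gauge['min']:.4g}  max={gauge['max']:.4g}  "
          f"(count={gauge['count']})")

# Total wall-clock seconds for the run, read from the root of the timer tree.
print("total seconds:", timers["total"])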