{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.7292778491973877,
"min": 1.6640934944152832,
"max": 1.7962404489517212,
"count": 49
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 17209.7734375,
"min": 12596.701171875,
"max": 22191.23046875,
"count": 49
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 53.955555555555556,
"min": 40.69767441860465,
"max": 81.16129032258064,
"count": 49
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 9712.0,
"min": 7000.0,
"max": 10684.0,
"count": 49
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1508.5287919842883,
"min": 1490.143744573098,
"max": 1513.876588566888,
"count": 49
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 135767.59127858595,
"min": 92431.31073084784,
"max": 180006.0418777591,
"count": 49
},
"SoccerTwos.Step.mean": {
"value": 6754998.0,
"min": 6514982.0,
"max": 6754998.0,
"count": 49
},
"SoccerTwos.Step.sum": {
"value": 6754998.0,
"min": 6514982.0,
"max": 6754998.0,
"count": 49
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": -0.011795819737017155,
"min": -0.13557730615139008,
"max": 0.0849948599934578,
"count": 49
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": -1.0616238117218018,
"min": -11.795226097106934,
"max": 6.544604301452637,
"count": 49
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": -0.00861634686589241,
"min": -0.14430177211761475,
"max": 0.08878348767757416,
"count": 49
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": -0.7754712104797363,
"min": -12.554253578186035,
"max": 6.836328506469727,
"count": 49
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 49
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 49
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": -0.05251555575264825,
"min": -0.2765686741794448,
"max": 0.3353449278983517,
"count": 49
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": -4.726400017738342,
"min": -22.95519995689392,
"max": 28.378599762916565,
"count": 49
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": -0.05251555575264825,
"min": -0.2765686741794448,
"max": 0.3353449278983517,
"count": 49
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": -4.726400017738342,
"min": -22.95519995689392,
"max": 28.378599762916565,
"count": 49
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 49
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 49
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.017573047970654444,
"min": 0.014597583053182461,
"max": 0.021161130908876658,
"count": 11
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.017573047970654444,
"min": 0.014597583053182461,
"max": 0.021161130908876658,
"count": 11
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.13159407824277877,
"min": 0.12053721149762471,
"max": 0.13899534344673156,
"count": 11
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.13159407824277877,
"min": 0.12053721149762471,
"max": 0.13899534344673156,
"count": 11
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.1348268409570058,
"min": 0.12216988056898118,
"max": 0.1415509695808093,
"count": 11
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.1348268409570058,
"min": 0.12216988056898118,
"max": 0.1415509695808093,
"count": 11
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 11
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 11
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 11
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 11
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 11
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 11
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1694301044",
"python_version": "3.9.17 (main, Jul 5 2023, 20:47:11) [MSC v.1916 64 bit (AMD64)]",
"command_line_arguments": "\\\\?\\F:\\ProgramData\\anaconda3\\envs\\rl\\Scripts\\mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos/SoccerTwos.exe --run-id=SoccerTwos --no-graphics --resume",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.0.1+cpu",
"numpy_version": "1.21.2",
"end_time_seconds": "1694302469"
},
"total": 1424.2489313,
"count": 1,
"self": 0.010233900000002905,
"children": {
"run_training.setup": {
"total": 0.13890439999999948,
"count": 1,
"self": 0.13890439999999948
},
"TrainerController.start_learning": {
"total": 1424.099793,
"count": 1,
"self": 0.596130299996048,
"children": {
"TrainerController._reset_env": {
"total": 4.470021500000097,
"count": 3,
"self": 4.470021500000097
},
"TrainerController.advance": {
"total": 1418.707166400004,
"count": 17241,
"self": 0.584840800009033,
"children": {
"env_step": {
"total": 408.37141819999925,
"count": 17241,
"self": 297.42648760000975,
"children": {
"SubprocessEnvManager._take_step": {
"total": 110.57506159999487,
"count": 17241,
"self": 3.8856886999988234,
"children": {
"TorchPolicy.evaluate": {
"total": 106.68937289999604,
"count": 31108,
"self": 106.68937289999604
}
}
},
"workers": {
"total": 0.369868999994619,
"count": 17241,
"self": 0.0,
"children": {
"worker_root": {
"total": 1360.326164599992,
"count": 17241,
"is_parallel": true,
"self": 1132.4555731999756,
"children": {
"steps_from_proto": {
"total": 0.007380800000023058,
"count": 6,
"is_parallel": true,
"self": 0.0016224999997893619,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.005758300000233696,
"count": 24,
"is_parallel": true,
"self": 0.005758300000233696
}
}
},
"UnityEnvironment.step": {
"total": 227.8632106000163,
"count": 17241,
"is_parallel": true,
"self": 11.888181800012006,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 12.338912099999533,
"count": 17241,
"is_parallel": true,
"self": 12.338912099999533
},
"communicator.exchange": {
"total": 164.6181298999897,
"count": 17241,
"is_parallel": true,
"self": 164.6181298999897
},
"steps_from_proto": {
"total": 39.01798680001505,
"count": 34482,
"is_parallel": true,
"self": 8.702775300027241,
"children": {
"_process_rank_one_or_two_observation": {
"total": 30.31521149998781,
"count": 137928,
"is_parallel": true,
"self": 30.31521149998781
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 1009.7509073999959,
"count": 17241,
"self": 4.084613499997772,
"children": {
"process_trajectory": {
"total": 152.83068689999814,
"count": 17241,
"self": 152.83068689999814
},
"_update_policy": {
"total": 852.835607,
"count": 12,
"self": 72.28850790000251,
"children": {
"TorchPOCAOptimizer.update": {
"total": 780.5470990999975,
"count": 357,
"self": 780.5470990999975
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.9999999949504854e-06,
"count": 1,
"self": 1.9999999949504854e-06
},
"TrainerController._save_models": {
"total": 0.3264727999999195,
"count": 1,
"self": 0.053011699999842676,
"children": {
"RLTrainer._checkpoint": {
"total": 0.27346110000007684,
"count": 1,
"self": 0.27346110000007684
}
}
}
}
}
}
}