{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.5916613340377808,
"min": 1.565659523010254,
"max": 1.9029737710952759,
"count": 300
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 30661.763671875,
"min": 29692.654296875,
"max": 42748.65625,
"count": 300
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 77.828125,
"min": 43.42982456140351,
"max": 94.14814814814815,
"count": 300
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19924.0,
"min": 18472.0,
"max": 20832.0,
"count": 300
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1661.7193149822251,
"min": 1613.413606125936,
"max": 1686.6873850756538,
"count": 300
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 212700.07231772481,
"min": 179700.28705761017,
"max": 380967.3052411987,
"count": 300
},
"SoccerTwos.Step.mean": {
"value": 10999966.0,
"min": 8009938.0,
"max": 10999966.0,
"count": 300
},
"SoccerTwos.Step.sum": {
"value": 10999966.0,
"min": 8009938.0,
"max": 10999966.0,
"count": 300
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": -0.008128762245178223,
"min": -0.13995903730392456,
"max": 0.08404076844453812,
"count": 300
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": -1.0323528051376343,
"min": -26.872135162353516,
"max": 17.396438598632812,
"count": 300
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": -0.01394909992814064,
"min": -0.14245162904262543,
"max": 0.08465230464935303,
"count": 300
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": -1.7715356349945068,
"min": -27.350711822509766,
"max": 17.523027420043945,
"count": 300
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 300
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 300
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": 0.03170393489477202,
"min": -0.39619117536965537,
"max": 0.27102511833255416,
"count": 300
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": 4.026399731636047,
"min": -58.27240002155304,
"max": 56.102199494838715,
"count": 300
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": 0.03170393489477202,
"min": -0.39619117536965537,
"max": 0.27102511833255416,
"count": 300
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": 4.026399731636047,
"min": -58.27240002155304,
"max": 56.102199494838715,
"count": 300
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 300
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 300
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.017844852724980834,
"min": 0.010616855310703006,
"max": 0.023935369885293767,
"count": 145
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.017844852724980834,
"min": 0.010616855310703006,
"max": 0.023935369885293767,
"count": 145
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.10248677084843318,
"min": 0.08636121278007826,
"max": 0.11994984075427055,
"count": 145
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.10248677084843318,
"min": 0.08636121278007826,
"max": 0.11994984075427055,
"count": 145
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.1036402314901352,
"min": 0.08740595479806264,
"max": 0.12132183661063513,
"count": 145
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.1036402314901352,
"min": 0.08740595479806264,
"max": 0.12132183661063513,
"count": 145
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 145
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 145
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 145
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 145
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 145
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 145
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1675936350",
"python_version": "3.9.16 (main, Feb 6 2023, 20:06:40) \n[GCC 9.3.0]",
"command_line_arguments": "/usr/bin/mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos/SoccerTwos.x86_64 --run-id=SoccerTwos --no-graphics --resume",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.11.0+cu102",
"numpy_version": "1.21.2",
"end_time_seconds": "1675948555"
},
"total": 12204.264974599995,
"count": 1,
"self": 0.42385389999253675,
"children": {
"run_training.setup": {
"total": 0.02615309995599091,
"count": 1,
"self": 0.02615309995599091
},
"TrainerController.start_learning": {
"total": 12203.814967600047,
"count": 1,
"self": 7.076828409160953,
"children": {
"TrainerController._reset_env": {
"total": 1.183405400137417,
"count": 16,
"self": 1.183405400137417
},
"TrainerController.advance": {
"total": 12194.689710790757,
"count": 207678,
"self": 7.580822688236367,
"children": {
"env_step": {
"total": 6618.124010004976,
"count": 207678,
"self": 5753.581308200839,
"children": {
"SubprocessEnvManager._take_step": {
"total": 860.1265352042392,
"count": 207678,
"self": 36.37521053070668,
"children": {
"TorchPolicy.evaluate": {
"total": 823.7513246735325,
"count": 376530,
"self": 823.7513246735325
}
}
},
"workers": {
"total": 4.416166599898133,
"count": 207678,
"self": 0.0,
"children": {
"worker_root": {
"total": 12188.440052606165,
"count": 207678,
"is_parallel": true,
"self": 7159.6416861004545,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.003249599947594106,
"count": 2,
"is_parallel": true,
"self": 0.00077479996252805,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0024747999850660563,
"count": 8,
"is_parallel": true,
"self": 0.0024747999850660563
}
}
},
"UnityEnvironment.step": {
"total": 0.03128100000321865,
"count": 1,
"is_parallel": true,
"self": 0.000546799972653389,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.00041690003126859665,
"count": 1,
"is_parallel": true,
"self": 0.00041690003126859665
},
"communicator.exchange": {
"total": 0.02869730000384152,
"count": 1,
"is_parallel": true,
"self": 0.02869730000384152
},
"steps_from_proto": {
"total": 0.0016199999954551458,
"count": 2,
"is_parallel": true,
"self": 0.00037319993134588003,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0012468000641092658,
"count": 8,
"is_parallel": true,
"self": 0.0012468000641092658
}
}
}
}
}
}
},
"steps_from_proto": {
"total": 0.033048400131519884,
"count": 30,
"is_parallel": true,
"self": 0.007254699652548879,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.025793700478971004,
"count": 120,
"is_parallel": true,
"self": 0.025793700478971004
}
}
},
"UnityEnvironment.step": {
"total": 5028.765318105579,
"count": 207677,
"is_parallel": true,
"self": 157.2845634209225,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 147.5874905093806,
"count": 207677,
"is_parallel": true,
"self": 147.5874905093806
},
"communicator.exchange": {
"total": 4247.264710290881,
"count": 207677,
"is_parallel": true,
"self": 4247.264710290881
},
"steps_from_proto": {
"total": 476.62855388439493,
"count": 415354,
"is_parallel": true,
"self": 106.98626380431233,
"children": {
"_process_rank_one_or_two_observation": {
"total": 369.6422900800826,
"count": 1661416,
"is_parallel": true,
"self": 369.6422900800826
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 5568.984878097544,
"count": 207678,
"self": 41.25628210080322,
"children": {
"process_trajectory": {
"total": 947.8661081971368,
"count": 207678,
"self": 946.6546150970971,
"children": {
"RLTrainer._checkpoint": {
"total": 1.211493100039661,
"count": 6,
"self": 1.211493100039661
}
}
},
"_update_policy": {
"total": 4579.862487799604,
"count": 145,
"self": 548.5033831004985,
"children": {
"TorchPOCAOptimizer.update": {
"total": 4031.359104699106,
"count": 4350,
"self": 4031.359104699106
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.400010660290718e-06,
"count": 1,
"self": 1.400010660290718e-06
},
"TrainerController._save_models": {
"total": 0.8650215999805368,
"count": 1,
"self": 0.0018364000134170055,
"children": {
"RLTrainer._checkpoint": {
"total": 0.8631851999671198,
"count": 1,
"self": 0.8631851999671198
}
}
}
}
}
}
}