poca-SoccerTwos/run_logs/timers.json
{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.4101879596710205,
"min": 1.332898497581482,
"max": 3.2957260608673096,
"count": 4999
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 26082.8359375,
"min": 22169.224609375,
"max": 136329.171875,
"count": 4999
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 66.66666666666667,
"min": 41.96521739130435,
"max": 999.0,
"count": 4999
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19200.0,
"min": 16408.0,
"max": 31968.0,
"count": 4999
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1573.4164997834623,
"min": 1186.6917021576141,
"max": 1669.2404820206275,
"count": 4982
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 226571.9759688186,
"min": 2385.2321208535777,
"max": 374084.48853586055,
"count": 4982
},
"SoccerTwos.Step.mean": {
"value": 49999855.0,
"min": 9300.0,
"max": 49999855.0,
"count": 5000
},
"SoccerTwos.Step.sum": {
"value": 49999855.0,
"min": 9300.0,
"max": 49999855.0,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": -0.013657771982252598,
"min": -0.1438334584236145,
"max": 0.15958504378795624,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": -1.980376958847046,
"min": -22.568527221679688,
"max": 22.907155990600586,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": -0.013520356267690659,
"min": -0.14296653866767883,
"max": 0.16129207611083984,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": -1.960451602935791,
"min": -23.43581199645996,
"max": 23.799938201904297,
"count": 5000
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 5000
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": -0.0393379310081745,
"min": -0.7995407404722991,
"max": 0.46243846072600436,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": -5.703999996185303,
"min": -68.21680003404617,
"max": 62.27440017461777,
"count": 5000
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": -0.0393379310081745,
"min": -0.7995407404722991,
"max": 0.46243846072600436,
"count": 5000
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": -5.703999996185303,
"min": -68.21680003404617,
"max": 62.27440017461777,
"count": 5000
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 5000
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 5000
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.01482800834637601,
"min": 0.009856824992554417,
"max": 0.025938480199935537,
"count": 2423
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.01482800834637601,
"min": 0.009856824992554417,
"max": 0.025938480199935537,
"count": 2423
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.09889063363273938,
"min": 9.758156763079266e-05,
"max": 0.1255539710323016,
"count": 2423
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.09889063363273938,
"min": 9.758156763079266e-05,
"max": 0.1255539710323016,
"count": 2423
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.09956269562244416,
"min": 9.800574043765664e-05,
"max": 0.12865835502743722,
"count": 2423
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.09956269562244416,
"min": 9.800574043765664e-05,
"max": 0.12865835502743722,
"count": 2423
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 2423
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 2423
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.19999999999999996,
"max": 0.20000000000000007,
"count": 2423
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.19999999999999996,
"max": 0.20000000000000007,
"count": 2423
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005,
"max": 0.005000000000000001,
"count": 2423
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005,
"max": 0.005000000000000001,
"count": 2423
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1704379062",
"python_version": "3.10.12 (main, Jul 5 2023, 18:54:27) [GCC 11.2.0]",
"command_line_arguments": "/home/external-rosia/miniconda3/envs/rl/bin/mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos --run-id=SoccerTwos --no-graphics",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.1.1+cu121",
"numpy_version": "1.23.5",
"end_time_seconds": "1704510825"
},
"total": 131762.20611413568,
"count": 1,
"self": 0.3711693393997848,
"children": {
"run_training.setup": {
"total": 0.017532712314277887,
"count": 1,
"self": 0.017532712314277887
},
"TrainerController.start_learning": {
"total": 131761.81741208397,
"count": 1,
"self": 46.809156748466194,
"children": {
"TrainerController._reset_env": {
"total": 9.621022965759039,
"count": 250,
"self": 9.621022965759039
},
"TrainerController.advance": {
"total": 131705.24929426098,
"count": 3442754,
"self": 47.45445933472365,
"children": {
"env_step": {
"total": 115003.77731117373,
"count": 3442754,
"self": 98212.8201123355,
"children": {
"SubprocessEnvManager._take_step": {
"total": 16761.12573245028,
"count": 3442754,
"self": 363.5279528698884,
"children": {
"TorchPolicy.evaluate": {
"total": 16397.597779580392,
"count": 6282076,
"self": 16397.597779580392
}
}
},
"workers": {
"total": 29.831466387957335,
"count": 3442754,
"self": 0.0,
"children": {
"worker_root": {
"total": 131569.22664459608,
"count": 3442754,
"is_parallel": true,
"self": 41519.95227844734,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0070021990686655045,
"count": 2,
"is_parallel": true,
"self": 0.0016497056931257248,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.00535249337553978,
"count": 8,
"is_parallel": true,
"self": 0.00535249337553978
}
}
},
"UnityEnvironment.step": {
"total": 0.03543186094611883,
"count": 1,
"is_parallel": true,
"self": 0.00166629021987319,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0012752348557114601,
"count": 1,
"is_parallel": true,
"self": 0.0012752348557114601
},
"communicator.exchange": {
"total": 0.027705941814929247,
"count": 1,
"is_parallel": true,
"self": 0.027705941814929247
},
"steps_from_proto": {
"total": 0.004784394055604935,
"count": 2,
"is_parallel": true,
"self": 0.0009212824515998363,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0038631116040050983,
"count": 8,
"is_parallel": true,
"self": 0.0038631116040050983
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 90048.13488004962,
"count": 3442753,
"is_parallel": true,
"self": 5340.922976269387,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 3693.6092337705195,
"count": 3442753,
"is_parallel": true,
"self": 3693.6092337705195
},
"communicator.exchange": {
"total": 65930.58416355588,
"count": 3442753,
"is_parallel": true,
"self": 65930.58416355588
},
"steps_from_proto": {
"total": 15083.018506453838,
"count": 6885506,
"is_parallel": true,
"self": 2741.9420437025838,
"children": {
"_process_rank_one_or_two_observation": {
"total": 12341.076462751254,
"count": 27542024,
"is_parallel": true,
"self": 12341.076462751254
}
}
}
}
},
"steps_from_proto": {
"total": 1.13948609912768,
"count": 498,
"is_parallel": true,
"self": 0.20297252899035811,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.936513570137322,
"count": 1992,
"is_parallel": true,
"self": 0.936513570137322
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 16654.017523752525,
"count": 3442754,
"self": 367.28710281895474,
"children": {
"process_trajectory": {
"total": 7652.751466519665,
"count": 3442754,
"self": 7638.177386100404,
"children": {
"RLTrainer._checkpoint": {
"total": 14.574080419261009,
"count": 100,
"self": 14.574080419261009
}
}
},
"_update_policy": {
"total": 8633.978954413906,
"count": 2423,
"self": 5363.2341864197515,
"children": {
"TorchPOCAOptimizer.update": {
"total": 3270.7447679941542,
"count": 72702,
"self": 3270.7447679941542
}
}
}
}
}
}
},
"trainer_threads": {
"total": 8.149072527885437e-07,
"count": 1,
"self": 8.149072527885437e-07
},
"TrainerController._save_models": {
"total": 0.13793729385361075,
"count": 1,
"self": 0.001317872665822506,
"children": {
"RLTrainer._checkpoint": {
"total": 0.13661942118778825,
"count": 1,
"self": 0.13661942118778825
}
}
}
}
}
}
}
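The JSON above is the timer/gauge dump written by mlagents-learn for this run: "gauges" stores per-statistic summaries (value, min, max, count), while the nested "children" tree records wall-clock timing per code section. As a minimal sketch only (not part of the original file), the snippet below shows how such a file could be inspected with the Python standard library; the local path "run_logs/timers.json" and the helper name walk are assumptions for illustration.

# Minimal inspection sketch for an ML-Agents timers.json (assumed local path).
import json

with open("run_logs/timers.json") as f:  # assumed location, per the repo path above
    timers = json.load(f)

# Print each gauge's final value alongside its observed range.
for name, gauge in timers["gauges"].items():
    print(f"{name}: value={gauge['value']:.4f} "
          f"(min={gauge['min']:.4f}, max={gauge['max']:.4f}, count={gauge['count']})")

# Recursively walk the hierarchical timer tree and report time spent per node.
def walk(node, name="root", depth=0):
    print(f"{'  ' * depth}{name}: total={node.get('total', 0.0):.1f}s "
          f"self={node.get('self', 0.0):.1f}s count={node.get('count', 0)}")
    for child_name, child in node.get("children", {}).items():
        walk(child, child_name, depth + 1)

walk(timers)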