poca-SoccerTwos / run_logs / timers.json
{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.956847906112671,
"min": 1.8903225660324097,
"max": 3.2957544326782227,
"count": 500
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 36193.859375,
"min": 16475.078125,
"max": 110030.84375,
"count": 500
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 61.46835443037975,
"min": 42.077586206896555,
"max": 922.2,
"count": 500
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19424.0,
"min": 15164.0,
"max": 27952.0,
"count": 500
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1617.661671165019,
"min": 1186.9501604636207,
"max": 1626.558679460036,
"count": 500
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 255590.544044073,
"min": 2373.9003209272414,
"max": 357645.9681102721,
"count": 500
},
"SoccerTwos.Step.mean": {
"value": 4999957.0,
"min": 9992.0,
"max": 4999957.0,
"count": 500
},
"SoccerTwos.Step.sum": {
"value": 4999957.0,
"min": 9992.0,
"max": 4999957.0,
"count": 500
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": -0.003471237374469638,
"min": -0.1024646908044815,
"max": 0.17885024845600128,
"count": 500
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": -0.5519267320632935,
"min": -17.510154724121094,
"max": 28.07948875427246,
"count": 500
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": -0.004224833566695452,
"min": -0.1098845973610878,
"max": 0.18746569752693176,
"count": 500
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": -0.6717485189437866,
"min": -17.836885452270508,
"max": 29.43211555480957,
"count": 500
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 500
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 500
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": -0.05302389837660879,
"min": -0.6146086972692738,
"max": 0.3608169009987737,
"count": 500
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": -8.430799841880798,
"min": -49.49279987812042,
"max": 53.87280023097992,
"count": 500
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": -0.05302389837660879,
"min": -0.6146086972692738,
"max": 0.3608169009987737,
"count": 500
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": -8.430799841880798,
"min": -49.49279987812042,
"max": 53.87280023097992,
"count": 500
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 500
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 500
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.01674836305901408,
"min": 0.011782423831755295,
"max": 0.025071226629370356,
"count": 241
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.01674836305901408,
"min": 0.011782423831755295,
"max": 0.025071226629370356,
"count": 241
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.10502038598060608,
"min": 0.0009461536421440542,
"max": 0.11359390541911125,
"count": 241
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.10502038598060608,
"min": 0.0009461536421440542,
"max": 0.11359390541911125,
"count": 241
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.10742061858375868,
"min": 0.0009570246906757045,
"max": 0.11576529120405515,
"count": 241
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.10742061858375868,
"min": 0.0009570246906757045,
"max": 0.11576529120405515,
"count": 241
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 241
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 241
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 241
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 241
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 241
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 241
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1680275911",
"python_version": "3.9.16 (main, Mar 8 2023, 10:39:24) [MSC v.1916 64 bit (AMD64)]",
"command_line_arguments": "C:\\Users\\Marcos\\anaconda3\\envs\\rl\\Scripts\\mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos.exe --run-id=SoccerTwos --force --no-graphics",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.13.1+cu116",
"numpy_version": "1.21.2",
"end_time_seconds": "1680288296"
},
"total": 8787.4974666,
"count": 1,
"self": 0.4041261999973358,
"children": {
"run_training.setup": {
"total": 0.08115060000000085,
"count": 1,
"self": 0.08115060000000085
},
"TrainerController.start_learning": {
"total": 8787.012189800002,
"count": 1,
"self": 6.358041899795353,
"children": {
"TrainerController._reset_env": {
"total": 4.259427300002088,
"count": 25,
"self": 4.259427300002088
},
"TrainerController.advance": {
"total": 8776.229988500205,
"count": 339946,
"self": 5.897761400125091,
"children": {
"env_step": {
"total": 6436.147159500028,
"count": 339946,
"self": 3879.608295799788,
"children": {
"SubprocessEnvManager._take_step": {
"total": 2552.3873122003274,
"count": 339946,
"self": 39.721887800704735,
"children": {
"TorchPolicy.evaluate": {
"total": 2512.6654243996227,
"count": 630012,
"self": 2512.6654243996227
}
}
},
"workers": {
"total": 4.15155149991249,
"count": 339946,
"self": 0.0,
"children": {
"worker_root": {
"total": 8774.894701800024,
"count": 339946,
"is_parallel": true,
"self": 5629.024736999766,
"children": {
"steps_from_proto": {
"total": 0.04208490000067755,
"count": 50,
"is_parallel": true,
"self": 0.007047700005711199,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.03503719999496635,
"count": 200,
"is_parallel": true,
"self": 0.03503719999496635
}
}
},
"UnityEnvironment.step": {
"total": 3145.827879900257,
"count": 339946,
"is_parallel": true,
"self": 161.16132089967869,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 138.14462160026022,
"count": 339946,
"is_parallel": true,
"self": 138.14462160026022
},
"communicator.exchange": {
"total": 2316.0667769002007,
"count": 339946,
"is_parallel": true,
"self": 2316.0667769002007
},
"steps_from_proto": {
"total": 530.4551605001175,
"count": 679892,
"is_parallel": true,
"self": 92.00831239942863,
"children": {
"_process_rank_one_or_two_observation": {
"total": 438.44684810068884,
"count": 2719568,
"is_parallel": true,
"self": 438.44684810068884
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 2334.185067600052,
"count": 339946,
"self": 51.003149799865696,
"children": {
"process_trajectory": {
"total": 1054.4678920001795,
"count": 339946,
"self": 1052.4980656001785,
"children": {
"RLTrainer._checkpoint": {
"total": 1.9698264000010113,
"count": 10,
"self": 1.9698264000010113
}
}
},
"_update_policy": {
"total": 1228.714025800007,
"count": 241,
"self": 765.0402651999807,
"children": {
"TorchPOCAOptimizer.update": {
"total": 463.67376060002636,
"count": 7230,
"self": 463.67376060002636
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.2000000424450263e-06,
"count": 1,
"self": 1.2000000424450263e-06
},
"TrainerController._save_models": {
"total": 0.16473089999999502,
"count": 1,
"self": 0.003001000000949716,
"children": {
"RLTrainer._checkpoint": {
"total": 0.1617298999990453,
"count": 1,
"self": 0.1617298999990453
}
}
}
}
}
}
}
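A minimal sketch (not part of the original log) of how this timers.json can be inspected with the Python standard library. It assumes a local copy of the file at the repo-relative path run_logs/timers.json; the key names ("gauges", "value", "min", "max", "count", "total", "children") are taken directly from the JSON above.

import json

# Load the ML-Agents timer/gauge dump produced by mlagents-learn.
# Path is an assumption: adjust to wherever the file lives locally.
with open("run_logs/timers.json", "r", encoding="utf-8") as f:
    timers = json.load(f)

# Each gauge stores the last recorded value plus min/max/count over the run.
for name, gauge in timers["gauges"].items():
    print(f"{name}: value={gauge['value']:.4f} "
          f"(min={gauge['min']:.4f}, max={gauge['max']:.4f}, n={gauge['count']})")

# The nested "children" blocks are hierarchical wall-clock timers:
# "total" is seconds spent in that scope across "count" invocations.
print("total run time (s):", timers["total"])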