Worm-PPO / run_logs / training_status.json
{
    "Worm": {
        "checkpoints": [
            {
                "steps": 5499000,
                "file_path": "results/Worm1/Worm/Worm-5499000.onnx",
                "reward": 864.1680162217882,
                "creation_time": 1677346126.5942814,
                "auxillary_file_paths": [
                    "results/Worm1/Worm/Worm-5499000.pt"
                ]
            },
            {
                "steps": 5999000,
                "file_path": "results/Worm1/Worm/Worm-5999000.onnx",
                "reward": 872.6929005589978,
                "creation_time": 1677346754.5913653,
                "auxillary_file_paths": [
                    "results/Worm1/Worm/Worm-5999000.pt"
                ]
            },
            {
                "steps": 6499000,
                "file_path": "results/Worm1/Worm/Worm-6499000.onnx",
                "reward": 886.2488403320312,
                "creation_time": 1677347384.3362055,
                "auxillary_file_paths": [
                    "results/Worm1/Worm/Worm-6499000.pt"
                ]
            },
            {
                "steps": 6999000,
                "file_path": "results/Worm1/Worm/Worm-6999000.onnx",
                "reward": 890.8352796766493,
                "creation_time": 1677348005.7821567,
                "auxillary_file_paths": [
                    "results/Worm1/Worm/Worm-6999000.pt"
                ]
            },
            {
                "steps": 7010000,
                "file_path": "results/Worm1/Worm/Worm-7010000.onnx",
                "reward": 912.6154235839844,
                "creation_time": 1677348016.3224113,
                "auxillary_file_paths": [
                    "results/Worm1/Worm/Worm-7010000.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 7010000,
            "file_path": "results/Worm1/Worm.onnx",
            "reward": 912.6154235839844,
            "creation_time": 1677348016.3224113,
            "auxillary_file_paths": [
                "results/Worm1/Worm/Worm-7010000.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.29.0.dev0",
        "torch_version": "1.8.1+cu102"
    }
}
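
The sketch below is an illustration, not part of the training run: one way to parse this training_status.json with plain Python and pick out the highest-reward checkpoint alongside the final exported policy. The relative path passed to open() is an assumption based on the repo layout above.

```python
import json

# Load the ML-Agents training status file (path is an assumption;
# adjust to wherever this file lives in your checkout).
with open("run_logs/training_status.json") as f:
    status = json.load(f)

worm = status["Worm"]

# Checkpoint with the highest recorded reward among the saved snapshots.
best = max(worm["checkpoints"], key=lambda ckpt: ckpt["reward"])
print(f"best checkpoint: {best['file_path']} "
      f"(steps={best['steps']}, reward={best['reward']:.2f})")

# The final exported policy is listed separately under "final_checkpoint".
final = worm["final_checkpoint"]
print(f"final policy:    {final['file_path']} "
      f"(steps={final['steps']}, reward={final['reward']:.2f})")
```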