ppo-SnowballTarget-v2 / run_logs / training_status.json
morganjeffries · I swear I did this before · 4204b12
{
    "SnowballTarget": {
        "checkpoints": [
            {
                "steps": 99960,
                "file_path": "results/SnowballTarget1/SnowballTarget/SnowballTarget-99960.onnx",
                "reward": 9.242424242424242,
                "creation_time": 1676836579.8710277,
                "auxillary_file_paths": [
                    "results/SnowballTarget1/SnowballTarget/SnowballTarget-99960.pt"
                ]
            },
            {
                "steps": 149984,
                "file_path": "results/SnowballTarget1/SnowballTarget/SnowballTarget-149984.onnx",
                "reward": 13.181818181818182,
                "creation_time": 1676836694.400086,
                "auxillary_file_paths": [
                    "results/SnowballTarget1/SnowballTarget/SnowballTarget-149984.pt"
                ]
            },
            {
                "steps": 199984,
                "file_path": "results/SnowballTarget1/SnowballTarget/SnowballTarget-199984.onnx",
                "reward": null,
                "creation_time": 1676836805.2549517,
                "auxillary_file_paths": [
                    "results/SnowballTarget1/SnowballTarget/SnowballTarget-199984.pt"
                ]
            },
            {
                "steps": 249944,
                "file_path": "results/SnowballTarget1/SnowballTarget/SnowballTarget-249944.onnx",
                "reward": 18.522727272727273,
                "creation_time": 1676836913.6898775,
                "auxillary_file_paths": [
                    "results/SnowballTarget1/SnowballTarget/SnowballTarget-249944.pt"
                ]
            },
            {
                "steps": 299968,
                "file_path": "results/SnowballTarget1/SnowballTarget/SnowballTarget-299968.onnx",
                "reward": 20.21212121212121,
                "creation_time": 1676837025.96719,
                "auxillary_file_paths": [
                    "results/SnowballTarget1/SnowballTarget/SnowballTarget-299968.pt"
                ]
            },
            {
                "steps": 349992,
                "file_path": "results/SnowballTarget1/SnowballTarget/SnowballTarget-349992.onnx",
                "reward": 22.363636363636363,
                "creation_time": 1676837136.252653,
                "auxillary_file_paths": [
                    "results/SnowballTarget1/SnowballTarget/SnowballTarget-349992.pt"
                ]
            },
            {
                "steps": 399992,
                "file_path": "results/SnowballTarget1/SnowballTarget/SnowballTarget-399992.onnx",
                "reward": 23.90909090909091,
                "creation_time": 1676837244.6015959,
                "auxillary_file_paths": [
                    "results/SnowballTarget1/SnowballTarget/SnowballTarget-399992.pt"
                ]
            },
            {
                "steps": 449952,
                "file_path": "results/SnowballTarget1/SnowballTarget/SnowballTarget-449952.onnx",
                "reward": null,
                "creation_time": 1676837355.6309803,
                "auxillary_file_paths": [
                    "results/SnowballTarget1/SnowballTarget/SnowballTarget-449952.pt"
                ]
            },
            {
                "steps": 499976,
                "file_path": "results/SnowballTarget1/SnowballTarget/SnowballTarget-499976.onnx",
                "reward": 24.022727272727273,
                "creation_time": 1676837462.4758642,
                "auxillary_file_paths": [
                    "results/SnowballTarget1/SnowballTarget/SnowballTarget-499976.pt"
                ]
            },
            {
                "steps": 500104,
                "file_path": "results/SnowballTarget1/SnowballTarget/SnowballTarget-500104.onnx",
                "reward": 24.022727272727273,
                "creation_time": 1676837462.6890075,
                "auxillary_file_paths": [
                    "results/SnowballTarget1/SnowballTarget/SnowballTarget-500104.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 500104,
            "file_path": "results/SnowballTarget1/SnowballTarget.onnx",
            "reward": 24.022727272727273,
            "creation_time": 1676837462.6890075,
            "auxillary_file_paths": [
                "results/SnowballTarget1/SnowballTarget/SnowballTarget-500104.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.29.0.dev0",
        "torch_version": "1.8.1+cu102"
    }
}
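
For reference, a minimal sketch of how the checkpoint records above could be read and summarized with Python's standard library. It assumes the file is saved locally as run_logs/training_status.json; that path and the printed format are illustrative, not part of the ML-Agents tooling.

# Minimal sketch: load the training_status.json shown above and print each
# checkpoint's step count, mean reward, and model file path.
import json

# Assumed local path for illustration.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

behavior = status["SnowballTarget"]  # behavior name used in this run
for ckpt in behavior["checkpoints"]:
    reward = ckpt["reward"]  # may be null/None for some checkpoints
    reward_str = f"{reward:.2f}" if reward is not None else "n/a"
    print(f"steps={ckpt['steps']:>7}  reward={reward_str}  file={ckpt['file_path']}")

final = behavior["final_checkpoint"]
print(f"final: steps={final['steps']}  reward={final['reward']:.2f}")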