{
"Huggy": {
"checkpoints": [
{
"steps": 199710,
"file_path": "results/Huggy/Huggy/Huggy-199710.onnx",
"reward": 3.456955061120502,
"creation_time": 1694983287.028897,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199710.pt"
]
},
{
"steps": 399998,
"file_path": "results/Huggy/Huggy/Huggy-399998.onnx",
"reward": 4.060324263216844,
"creation_time": 1694983562.1404667,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399998.pt"
]
},
{
"steps": 599882,
"file_path": "results/Huggy/Huggy/Huggy-599882.onnx",
"reward": 3.896624879837036,
"creation_time": 1694983832.2990098,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599882.pt"
]
},
{
"steps": 799399,
"file_path": "results/Huggy/Huggy/Huggy-799399.onnx",
"reward": 4.047748731978146,
"creation_time": 1694984093.5062232,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799399.pt"
]
},
{
"steps": 999976,
"file_path": "results/Huggy/Huggy/Huggy-999976.onnx",
"reward": 3.5644063788039664,
"creation_time": 1694984363.744499,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999976.pt"
]
},
{
"steps": 1199954,
"file_path": "results/Huggy/Huggy/Huggy-1199954.onnx",
"reward": 4.092098294757307,
"creation_time": 1694984631.9576824,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199954.pt"
]
},
{
"steps": 1399976,
"file_path": "results/Huggy/Huggy/Huggy-1399976.onnx",
"reward": 3.641690073899887,
"creation_time": 1694984889.6967673,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399976.pt"
]
},
{
"steps": 1599945,
"file_path": "results/Huggy/Huggy/Huggy-1599945.onnx",
"reward": 4.043886116449384,
"creation_time": 1694985161.305746,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599945.pt"
]
},
{
"steps": 1799358,
"file_path": "results/Huggy/Huggy/Huggy-1799358.onnx",
"reward": 3.653856501264392,
"creation_time": 1694985432.6799467,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799358.pt"
]
},
{
"steps": 1999333,
"file_path": "results/Huggy/Huggy/Huggy-1999333.onnx",
"reward": 3.515480575727862,
"creation_time": 1694985711.8571582,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999333.pt"
]
},
{
"steps": 2000083,
"file_path": "results/Huggy/Huggy/Huggy-2000083.onnx",
"reward": 3.4865733360250792,
"creation_time": 1694985712.009077,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000083.pt"
]
}
],
"final_checkpoint": {
"steps": 2000083,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.4865733360250792,
"creation_time": 1694985712.009077,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000083.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}