{
"Huggy": {
"checkpoints": [
{
"steps": 199888,
"file_path": "results/Huggy/Huggy/Huggy-199888.onnx",
"reward": 3.846054460139985,
"creation_time": 1672136823.6442792,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199888.pt"
]
},
{
"steps": 399638,
"file_path": "results/Huggy/Huggy/Huggy-399638.onnx",
"reward": 3.5568460187186366,
"creation_time": 1672137044.9167888,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399638.pt"
]
},
{
"steps": 599914,
"file_path": "results/Huggy/Huggy/Huggy-599914.onnx",
"reward": 3.675060345758846,
"creation_time": 1672137264.1173675,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599914.pt"
]
},
{
"steps": 799896,
"file_path": "results/Huggy/Huggy/Huggy-799896.onnx",
"reward": 3.7925355657935143,
"creation_time": 1672137489.7003675,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799896.pt"
]
},
{
"steps": 999960,
"file_path": "results/Huggy/Huggy/Huggy-999960.onnx",
"reward": 3.9221427297362914,
"creation_time": 1672137708.8173175,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999960.pt"
]
},
{
"steps": 1199996,
"file_path": "results/Huggy/Huggy/Huggy-1199996.onnx",
"reward": 3.236453005761811,
"creation_time": 1672137934.6363614,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199996.pt"
]
},
{
"steps": 1399535,
"file_path": "results/Huggy/Huggy/Huggy-1399535.onnx",
"reward": 3.8566211952170746,
"creation_time": 1672138155.267767,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399535.pt"
]
},
{
"steps": 1599944,
"file_path": "results/Huggy/Huggy/Huggy-1599944.onnx",
"reward": 3.944614048515047,
"creation_time": 1672138379.0239532,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599944.pt"
]
},
{
"steps": 1799937,
"file_path": "results/Huggy/Huggy/Huggy-1799937.onnx",
"reward": 3.8045326966505786,
"creation_time": 1672138603.6806858,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799937.pt"
]
},
{
"steps": 1999979,
"file_path": "results/Huggy/Huggy/Huggy-1999979.onnx",
"reward": 3.8452266521187894,
"creation_time": 1672138825.031096,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999979.pt"
]
},
{
"steps": 2000044,
"file_path": "results/Huggy/Huggy/Huggy-2000044.onnx",
"reward": 3.8457173917974745,
"creation_time": 1672138825.1519692,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000044.pt"
]
}
],
"final_checkpoint": {
"steps": 2000044,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.8457173917974745,
"creation_time": 1672138825.1519692,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000044.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}