{
"Huggy": {
"checkpoints": [
{
"steps": 199743,
"file_path": "results/Huggy/Huggy/Huggy-199743.onnx",
"reward": 3.4021278619766235,
"creation_time": 1679054756.4818618,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199743.pt"
]
},
{
"steps": 399995,
"file_path": "results/Huggy/Huggy/Huggy-399995.onnx",
"reward": 3.964847445487976,
"creation_time": 1679054991.1636686,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399995.pt"
]
},
{
"steps": 599670,
"file_path": "results/Huggy/Huggy/Huggy-599670.onnx",
"reward": 4.707258081436157,
"creation_time": 1679055229.2886922,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599670.pt"
]
},
{
"steps": 799896,
"file_path": "results/Huggy/Huggy/Huggy-799896.onnx",
"reward": 3.660379723913368,
"creation_time": 1679055466.7332997,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799896.pt"
]
},
{
"steps": 999966,
"file_path": "results/Huggy/Huggy/Huggy-999966.onnx",
"reward": 3.735253486902483,
"creation_time": 1679055705.1167917,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999966.pt"
]
},
{
"steps": 1199875,
"file_path": "results/Huggy/Huggy/Huggy-1199875.onnx",
"reward": 3.934206848878127,
"creation_time": 1679055946.348977,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199875.pt"
]
},
{
"steps": 1399908,
"file_path": "results/Huggy/Huggy/Huggy-1399908.onnx",
"reward": 3.665196657180786,
"creation_time": 1679056186.3828824,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399908.pt"
]
},
{
"steps": 1599943,
"file_path": "results/Huggy/Huggy/Huggy-1599943.onnx",
"reward": 3.965461870927489,
"creation_time": 1679056424.2278273,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599943.pt"
]
},
{
"steps": 1799933,
"file_path": "results/Huggy/Huggy/Huggy-1799933.onnx",
"reward": 3.8000056117267933,
"creation_time": 1679056663.9988484,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799933.pt"
]
},
{
"steps": 1999449,
"file_path": "results/Huggy/Huggy/Huggy-1999449.onnx",
"reward": 3.8986463057689176,
"creation_time": 1679056902.3022,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999449.pt"
]
},
{
"steps": 2000199,
"file_path": "results/Huggy/Huggy/Huggy-2000199.onnx",
"reward": 3.7092464923858643,
"creation_time": 1679056902.4458597,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000199.pt"
]
}
],
"final_checkpoint": {
"steps": 2000199,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.7092464923858643,
"creation_time": 1679056902.4458597,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000199.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}