{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199801,
                "file_path": "results/Huggy/Huggy/Huggy-199801.onnx",
                "reward": 3.5099475795114543,
                "creation_time": 1677006667.8684912,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199801.pt"
                ]
            },
            {
                "steps": 399837,
                "file_path": "results/Huggy/Huggy/Huggy-399837.onnx",
                "reward": 4.013044872650734,
                "creation_time": 1677006896.685132,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399837.pt"
                ]
            },
            {
                "steps": 599927,
                "file_path": "results/Huggy/Huggy/Huggy-599927.onnx",
                "reward": 3.3528820037841798,
                "creation_time": 1677007131.2080112,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599927.pt"
                ]
            },
            {
                "steps": 799395,
                "file_path": "results/Huggy/Huggy/Huggy-799395.onnx",
                "reward": 3.8379702350096916,
                "creation_time": 1677007361.1161754,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799395.pt"
                ]
            },
            {
                "steps": 999663,
                "file_path": "results/Huggy/Huggy/Huggy-999663.onnx",
                "reward": 3.5314175008742277,
                "creation_time": 1677007597.513285,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999663.pt"
                ]
            },
            {
                "steps": 1199377,
                "file_path": "results/Huggy/Huggy/Huggy-1199377.onnx",
                "reward": 3.6173533727140987,
                "creation_time": 1677007830.4448638,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199377.pt"
                ]
            },
            {
                "steps": 1399557,
                "file_path": "results/Huggy/Huggy/Huggy-1399557.onnx",
                "reward": 3.7087759485115877,
                "creation_time": 1677008062.8427393,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399557.pt"
                ]
            },
            {
                "steps": 1599902,
                "file_path": "results/Huggy/Huggy/Huggy-1599902.onnx",
                "reward": 3.8519918438864917,
                "creation_time": 1677008299.7825184,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599902.pt"
                ]
            },
            {
                "steps": 1799915,
                "file_path": "results/Huggy/Huggy/Huggy-1799915.onnx",
                "reward": 3.457738896733836,
                "creation_time": 1677008537.7199109,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799915.pt"
                ]
            },
            {
                "steps": 1999978,
                "file_path": "results/Huggy/Huggy/Huggy-1999978.onnx",
                "reward": 3.8113217287593417,
                "creation_time": 1677008782.1136165,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999978.pt"
                ]
            },
            {
                "steps": 2000110,
                "file_path": "results/Huggy/Huggy/Huggy-2000110.onnx",
                "reward": 3.96804015887411,
                "creation_time": 1677008782.2682426,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000110.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000110,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.96804015887411,
            "creation_time": 1677008782.2682426,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000110.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.29.0.dev0",
        "torch_version": "1.8.1+cu102"
    }
}