ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199905,
                "file_path": "results/Huggy/Huggy/Huggy-199905.onnx",
                "reward": 3.184600167460256,
                "creation_time": 1686834048.578919,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199905.pt"
                ]
            },
            {
                "steps": 399774,
                "file_path": "results/Huggy/Huggy/Huggy-399774.onnx",
                "reward": 3.8877386239858773,
                "creation_time": 1686834301.1599653,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399774.pt"
                ]
            },
            {
                "steps": 599989,
                "file_path": "results/Huggy/Huggy/Huggy-599989.onnx",
                "reward": 3.277740001678467,
                "creation_time": 1686834562.8043778,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599989.pt"
                ]
            },
            {
                "steps": 799975,
                "file_path": "results/Huggy/Huggy/Huggy-799975.onnx",
                "reward": 4.00450069052261,
                "creation_time": 1686834818.3173776,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799975.pt"
                ]
            },
            {
                "steps": 999933,
                "file_path": "results/Huggy/Huggy/Huggy-999933.onnx",
                "reward": 4.078178751785143,
                "creation_time": 1686835074.3466322,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999933.pt"
                ]
            },
            {
                "steps": 1199935,
                "file_path": "results/Huggy/Huggy/Huggy-1199935.onnx",
                "reward": 4.072751335901756,
                "creation_time": 1686835329.0531876,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199935.pt"
                ]
            },
            {
                "steps": 1399876,
                "file_path": "results/Huggy/Huggy/Huggy-1399876.onnx",
                "reward": 4.661350965499878,
                "creation_time": 1686835582.1554508,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399876.pt"
                ]
            },
            {
                "steps": 1599937,
                "file_path": "results/Huggy/Huggy/Huggy-1599937.onnx",
                "reward": 3.928912965891262,
                "creation_time": 1686835832.714756,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599937.pt"
                ]
            },
            {
                "steps": 1799995,
                "file_path": "results/Huggy/Huggy/Huggy-1799995.onnx",
                "reward": 3.862717500754765,
                "creation_time": 1686836081.3148026,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799995.pt"
                ]
            },
            {
                "steps": 1999971,
                "file_path": "results/Huggy/Huggy/Huggy-1999971.onnx",
                "reward": 3.9873966982490137,
                "creation_time": 1686836327.0869608,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999971.pt"
                ]
            },
            {
                "steps": 2000018,
                "file_path": "results/Huggy/Huggy/Huggy-2000018.onnx",
                "reward": 3.98493983622255,
                "creation_time": 1686836327.2029443,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000018.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000018,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.98493983622255,
            "creation_time": 1686836327.2029443,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000018.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}
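
A minimal sketch (not part of the original file) of one way to read this training_status.json with the Python standard library and summarize the recorded checkpoints; the relative path used below is an assumption based on the repository layout shown above.

# Hypothetical reader for run_logs/training_status.json; path is assumed.
import json

with open("run_logs/training_status.json") as f:
    status = json.load(f)

# Print steps, mean reward, and the exported ONNX path for each checkpoint.
for ckpt in status["Huggy"]["checkpoints"]:
    print(f'{ckpt["steps"]:>8} steps  reward={ckpt["reward"]:.3f}  {ckpt["file_path"]}')

# The final checkpoint mirrors the last entry but points at the exported model.
final = status["Huggy"]["final_checkpoint"]
print("final:", final["file_path"], "reward", round(final["reward"], 3))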