ppo-Huggy / run_logs /training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199761,
                "file_path": "results/Huggy/Huggy/Huggy-199761.onnx",
                "reward": 3.4365629578979924,
                "creation_time": 1672748495.9923959,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199761.pt"
                ]
            },
            {
                "steps": 399831,
                "file_path": "results/Huggy/Huggy/Huggy-399831.onnx",
                "reward": 3.362950755426517,
                "creation_time": 1672748737.3829854,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399831.pt"
                ]
            },
            {
                "steps": 599942,
                "file_path": "results/Huggy/Huggy/Huggy-599942.onnx",
                "reward": 3.683068118597332,
                "creation_time": 1672748976.8309572,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599942.pt"
                ]
            },
            {
                "steps": 799926,
                "file_path": "results/Huggy/Huggy/Huggy-799926.onnx",
                "reward": 3.7422314753789387,
                "creation_time": 1672749215.0432673,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799926.pt"
                ]
            },
            {
                "steps": 999952,
                "file_path": "results/Huggy/Huggy/Huggy-999952.onnx",
                "reward": 3.264396755619252,
                "creation_time": 1672749462.8157456,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999952.pt"
                ]
            },
            {
                "steps": 1199416,
                "file_path": "results/Huggy/Huggy/Huggy-1199416.onnx",
                "reward": 3.4441296259562173,
                "creation_time": 1672749716.3811126,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199416.pt"
                ]
            },
            {
                "steps": 1399955,
                "file_path": "results/Huggy/Huggy/Huggy-1399955.onnx",
                "reward": 3.7003696033212004,
                "creation_time": 1672749953.0808036,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399955.pt"
                ]
            },
            {
                "steps": 1599951,
                "file_path": "results/Huggy/Huggy/Huggy-1599951.onnx",
                "reward": 3.6905892700343936,
                "creation_time": 1672750192.1506636,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599951.pt"
                ]
            },
            {
                "steps": 1799954,
                "file_path": "results/Huggy/Huggy/Huggy-1799954.onnx",
                "reward": 3.5652323645281503,
                "creation_time": 1672750431.7813363,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799954.pt"
                ]
            },
            {
                "steps": 1999870,
                "file_path": "results/Huggy/Huggy/Huggy-1999870.onnx",
                "reward": 3.7847243679894342,
                "creation_time": 1672750670.5588982,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999870.pt"
                ]
            },
            {
                "steps": 2000620,
                "file_path": "results/Huggy/Huggy/Huggy-2000620.onnx",
                "reward": 3.5416003380502974,
                "creation_time": 1672750670.709005,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000620.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000620,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.5416003380502974,
            "creation_time": 1672750670.709005,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000620.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.29.0.dev0",
        "torch_version": "1.8.1+cu102"
    }
}
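For readers who want to inspect this log programmatically rather than by eye, here is a minimal Python sketch (not part of the original log) that loads the file and compares checkpoint rewards. The relative path `run_logs/training_status.json` is an assumption; adjust it to wherever the file lives in your checkout. Note the key `auxillary_file_paths` is spelled exactly as ML-Agents writes it.

```python
import json

# Minimal sketch: load the ML-Agents training status and compare checkpoint
# rewards. The relative path below is an assumption; point it at wherever
# this file sits in your copy of the repo.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

checkpoints = status["Huggy"]["checkpoints"]

# List every checkpoint with its step count and recorded reward.
for ckpt in checkpoints:
    print(f"{ckpt['steps']:>9} steps  reward={ckpt['reward']:.4f}  {ckpt['file_path']}")

# Highest-reward checkpoint. In this log the exported final policy
# (results/Huggy/Huggy.onnx, reward ~3.54) is simply the last checkpoint,
# not the best one by reward (Huggy-1999870.onnx, reward ~3.78).
best = max(checkpoints, key=lambda c: c["reward"])
print(f"best by reward: {best['file_path']} ({best['reward']:.4f})")
```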