ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199704,
                "file_path": "results/Huggy/Huggy/Huggy-199704.onnx",
                "reward": 3.505941923350504,
                "creation_time": 1684809909.980818,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199704.pt"
                ]
            },
            {
                "steps": 399917,
                "file_path": "results/Huggy/Huggy/Huggy-399917.onnx",
                "reward": 4.052444142453811,
                "creation_time": 1684810173.1391478,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399917.pt"
                ]
            },
            {
                "steps": 599976,
                "file_path": "results/Huggy/Huggy/Huggy-599976.onnx",
                "reward": 3.6782128088402026,
                "creation_time": 1684810439.0618517,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599976.pt"
                ]
            },
            {
                "steps": 799995,
                "file_path": "results/Huggy/Huggy/Huggy-799995.onnx",
                "reward": 3.6454151430630795,
                "creation_time": 1684810695.4761403,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799995.pt"
                ]
            },
            {
                "steps": 999942,
                "file_path": "results/Huggy/Huggy/Huggy-999942.onnx",
                "reward": 3.625333970314578,
                "creation_time": 1684810953.1160297,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999942.pt"
                ]
            },
            {
                "steps": 1199970,
                "file_path": "results/Huggy/Huggy/Huggy-1199970.onnx",
                "reward": 4.182958119875424,
                "creation_time": 1684811211.6491137,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199970.pt"
                ]
            },
            {
                "steps": 1399988,
                "file_path": "results/Huggy/Huggy/Huggy-1399988.onnx",
                "reward": 3.670449660374568,
                "creation_time": 1684811472.231357,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399988.pt"
                ]
            },
            {
                "steps": 1599309,
                "file_path": "results/Huggy/Huggy/Huggy-1599309.onnx",
                "reward": 4.064227928235693,
                "creation_time": 1684811728.7371497,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599309.pt"
                ]
            },
            {
                "steps": 1799993,
                "file_path": "results/Huggy/Huggy/Huggy-1799993.onnx",
                "reward": 3.989393007830254,
                "creation_time": 1684811993.7446778,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799993.pt"
                ]
            },
            {
                "steps": 1999354,
                "file_path": "results/Huggy/Huggy/Huggy-1999354.onnx",
                "reward": 4.126788970082998,
                "creation_time": 1684812253.37151,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999354.pt"
                ]
            },
            {
                "steps": 2000104,
                "file_path": "results/Huggy/Huggy/Huggy-2000104.onnx",
                "reward": 4.013630305803739,
                "creation_time": 1684812253.6024423,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000104.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000104,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 4.013630305803739,
            "creation_time": 1684812253.6024423,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000104.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}
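
Each checkpoint record pairs a saved `.onnx` policy with its mean reward, training step count, Unix `creation_time`, and the associated `.pt` file (note that `auxillary_file_paths` is ML-Agents' actual, misspelled key). As a minimal sketch of how one might query this log, assuming the file sits at the path shown above, the standard-library snippet below loads it and reports the highest-reward checkpoint:

```python
import json
from datetime import datetime, timezone

# Load the ML-Agents training status log (path assumed from this repo's layout).
with open("run_logs/training_status.json") as f:
    status = json.load(f)

# Pick the checkpoint with the highest mean reward across the run.
checkpoints = status["Huggy"]["checkpoints"]
best = max(checkpoints, key=lambda c: c["reward"])

# creation_time is a Unix timestamp; convert it to a readable UTC datetime.
saved_at = datetime.fromtimestamp(best["creation_time"], tz=timezone.utc)
print(f"best checkpoint: {best['file_path']}")
print(f"  steps:  {best['steps']}")
print(f"  reward: {best['reward']:.3f}")
print(f"  saved:  {saved_at.isoformat()}")
```

On this log, that would select the 1,199,970-step checkpoint (reward ≈ 4.183) rather than the final one, which ML-Agents exports to `results/Huggy/Huggy.onnx` regardless of reward.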