{
"Huggy": {
"checkpoints": [
{
"steps": 199825,
"file_path": "results/Huggy/Huggy/Huggy-199825.onnx",
"reward": 3.698978693980091,
"creation_time": 1701683827.5555396,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199825.pt"
]
},
{
"steps": 399932,
"file_path": "results/Huggy/Huggy/Huggy-399932.onnx",
"reward": 4.12438810339161,
"creation_time": 1701684083.2597213,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399932.pt"
]
},
{
"steps": 599881,
"file_path": "results/Huggy/Huggy/Huggy-599881.onnx",
"reward": null,
"creation_time": 1701684342.127822,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599881.pt"
]
},
{
"steps": 799916,
"file_path": "results/Huggy/Huggy/Huggy-799916.onnx",
"reward": 3.8531276218878117,
"creation_time": 1701684604.2983248,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799916.pt"
]
},
{
"steps": 999976,
"file_path": "results/Huggy/Huggy/Huggy-999976.onnx",
"reward": 3.8836831556302367,
"creation_time": 1701684869.720715,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999976.pt"
]
},
{
"steps": 1199520,
"file_path": "results/Huggy/Huggy/Huggy-1199520.onnx",
"reward": 4.246414137192262,
"creation_time": 1701685136.511636,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199520.pt"
]
},
{
"steps": 1399931,
"file_path": "results/Huggy/Huggy/Huggy-1399931.onnx",
"reward": 3.509270244353526,
"creation_time": 1701685399.2409134,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399931.pt"
]
},
{
"steps": 1599938,
"file_path": "results/Huggy/Huggy/Huggy-1599938.onnx",
"reward": 3.6761425296207526,
"creation_time": 1701685666.759298,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599938.pt"
]
},
{
"steps": 1799975,
"file_path": "results/Huggy/Huggy/Huggy-1799975.onnx",
"reward": 3.8162170755557523,
"creation_time": 1701685936.176224,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799975.pt"
]
},
{
"steps": 1999939,
"file_path": "results/Huggy/Huggy/Huggy-1999939.onnx",
"reward": 3.9456878867414265,
"creation_time": 1701686203.4008296,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999939.pt"
]
},
{
"steps": 2000014,
"file_path": "results/Huggy/Huggy/Huggy-2000014.onnx",
"reward": 3.883874576342733,
"creation_time": 1701686203.5137084,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000014.pt"
]
}
],
"final_checkpoint": {
"steps": 2000014,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.883874576342733,
"creation_time": 1701686203.5137084,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000014.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.1+cu121"
}
}