ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199894,
                "file_path": "results/Huggy/Huggy/Huggy-199894.onnx",
                "reward": 3.789275001307003,
                "creation_time": 1688293712.882713,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199894.pt"
                ]
            },
            {
                "steps": 399965,
                "file_path": "results/Huggy/Huggy/Huggy-399965.onnx",
                "reward": 3.8998451363550473,
                "creation_time": 1688293943.1476595,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399965.pt"
                ]
            },
            {
                "steps": 599949,
                "file_path": "results/Huggy/Huggy/Huggy-599949.onnx",
                "reward": 3.289754052956899,
                "creation_time": 1688294175.6038845,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599949.pt"
                ]
            },
            {
                "steps": 799963,
                "file_path": "results/Huggy/Huggy/Huggy-799963.onnx",
                "reward": 3.796871502553263,
                "creation_time": 1688294405.4604523,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799963.pt"
                ]
            },
            {
                "steps": 999940,
                "file_path": "results/Huggy/Huggy/Huggy-999940.onnx",
                "reward": 3.8632906079292297,
                "creation_time": 1688294640.8929605,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999940.pt"
                ]
            },
            {
                "steps": 1199954,
                "file_path": "results/Huggy/Huggy/Huggy-1199954.onnx",
                "reward": 4.209742052214486,
                "creation_time": 1688294876.295344,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199954.pt"
                ]
            },
            {
                "steps": 1399933,
                "file_path": "results/Huggy/Huggy/Huggy-1399933.onnx",
                "reward": 3.1496703227361045,
                "creation_time": 1688295112.846209,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399933.pt"
                ]
            },
            {
                "steps": 1599980,
                "file_path": "results/Huggy/Huggy/Huggy-1599980.onnx",
                "reward": 3.713481324188637,
                "creation_time": 1688295345.2282324,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599980.pt"
                ]
            },
            {
                "steps": 1799941,
                "file_path": "results/Huggy/Huggy/Huggy-1799941.onnx",
                "reward": 3.7534887485267703,
                "creation_time": 1688295580.7902992,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799941.pt"
                ]
            },
            {
                "steps": 1999912,
                "file_path": "results/Huggy/Huggy/Huggy-1999912.onnx",
                "reward": 3.97340074338411,
                "creation_time": 1688295816.7131436,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999912.pt"
                ]
            },
            {
                "steps": 2000060,
                "file_path": "results/Huggy/Huggy/Huggy-2000060.onnx",
                "reward": 4.065211772918701,
                "creation_time": 1688295816.839245,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000060.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000060,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 4.065211772918701,
            "creation_time": 1688295816.839245,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000060.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}
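
Since this file is plain JSON, it can be inspected directly. Below is a minimal sketch (assumptions: Python 3, the file saved locally as `training_status.json`) that prints each checkpoint's step count, mean reward, and creation time, plus the final exported policy. Note that `auxillary_file_paths` is the key spelling ML-Agents actually writes, so it is read as-is.

```python
import json
from datetime import datetime, timezone

# Assumed path: the JSON above saved next to this script.
with open("training_status.json") as f:
    status = json.load(f)

behavior = status["Huggy"]

# One line per recorded checkpoint: step count, reward, UTC timestamp, file.
for ckpt in behavior["checkpoints"]:
    created = datetime.fromtimestamp(ckpt["creation_time"], tz=timezone.utc)
    print(f"step {ckpt['steps']:>7}  reward {ckpt['reward']:.3f}  "
          f"{created:%Y-%m-%d %H:%M} UTC  {ckpt['file_path']}")

# final_checkpoint points at the exported policy, results/Huggy/Huggy.onnx.
final = behavior["final_checkpoint"]
print(f"final: step {final['steps']}, reward {final['reward']:.3f}, "
      f"file {final['file_path']}")
```

Running this over the data above shows the checkpoint reward fluctuating between roughly 3.15 and 4.21 over the 2M-step run, with the final exported policy at a reward of about 4.07.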