{
"Huggy": {
"checkpoints": [
{
"steps": 199698,
"file_path": "results/Huggy2/Huggy/Huggy-199698.onnx",
"reward": 3.081982187205745,
"creation_time": 1719927828.808561,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199698.pt"
]
},
{
"steps": 399914,
"file_path": "results/Huggy2/Huggy/Huggy-399914.onnx",
"reward": 3.7804218005325834,
"creation_time": 1719928073.2881017,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399914.pt"
]
},
{
"steps": 599943,
"file_path": "results/Huggy2/Huggy/Huggy-599943.onnx",
"reward": 3.78343259877172,
"creation_time": 1719928320.4222097,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599943.pt"
]
},
{
"steps": 799982,
"file_path": "results/Huggy2/Huggy/Huggy-799982.onnx",
"reward": 3.840164286799209,
"creation_time": 1719928564.6939638,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799982.pt"
]
},
{
"steps": 999640,
"file_path": "results/Huggy2/Huggy/Huggy-999640.onnx",
"reward": 3.6994690033607185,
"creation_time": 1719928812.5920734,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999640.pt"
]
},
{
"steps": 1199841,
"file_path": "results/Huggy2/Huggy/Huggy-1199841.onnx",
"reward": 3.546501166383985,
"creation_time": 1719929064.735121,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199841.pt"
]
},
{
"steps": 1399862,
"file_path": "results/Huggy2/Huggy/Huggy-1399862.onnx",
"reward": 3.9985499382019043,
"creation_time": 1719929324.2037249,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399862.pt"
]
},
{
"steps": 1599962,
"file_path": "results/Huggy2/Huggy/Huggy-1599962.onnx",
"reward": 3.517457799400602,
"creation_time": 1719929570.2356925,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599962.pt"
]
},
{
"steps": 1799992,
"file_path": "results/Huggy2/Huggy/Huggy-1799992.onnx",
"reward": 3.862015335261822,
"creation_time": 1719929818.5223503,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799992.pt"
]
},
{
"steps": 1999999,
"file_path": "results/Huggy2/Huggy/Huggy-1999999.onnx",
"reward": 4.116968026527991,
"creation_time": 1719930073.9882283,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999999.pt"
]
},
{
"steps": 2000128,
"file_path": "results/Huggy2/Huggy/Huggy-2000128.onnx",
"reward": 4.30272468498775,
"creation_time": 1719930074.118385,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000128.pt"
]
}
],
"final_checkpoint": {
"steps": 2000128,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 4.30272468498775,
"creation_time": 1719930074.118385,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000128.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.0+cu121"
}
}