ppo-Huggy/run_logs/training_status.json
{
"Huggy": {
"checkpoints": [
{
"steps": 199662,
"file_path": "results/Huggy2/Huggy/Huggy-199662.onnx",
"reward": 3.5972873670980334,
"creation_time": 1712819744.5716412,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199662.pt"
]
},
{
"steps": 399908,
"file_path": "results/Huggy2/Huggy/Huggy-399908.onnx",
"reward": 3.693460939294201,
"creation_time": 1712819981.405933,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399908.pt"
]
},
{
"steps": 599931,
"file_path": "results/Huggy2/Huggy/Huggy-599931.onnx",
"reward": 3.5833987474441527,
"creation_time": 1712820223.5508347,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599931.pt"
]
},
{
"steps": 799961,
"file_path": "results/Huggy2/Huggy/Huggy-799961.onnx",
"reward": 3.6212772907298287,
"creation_time": 1712820466.9579835,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799961.pt"
]
},
{
"steps": 999914,
"file_path": "results/Huggy2/Huggy/Huggy-999914.onnx",
"reward": 4.070140746674796,
"creation_time": 1712820710.9681087,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999914.pt"
]
},
{
"steps": 1199846,
"file_path": "results/Huggy2/Huggy/Huggy-1199846.onnx",
"reward": 4.0771140165627004,
"creation_time": 1712820963.0792983,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199846.pt"
]
},
{
"steps": 1399992,
"file_path": "results/Huggy2/Huggy/Huggy-1399992.onnx",
"reward": 4.146114375856188,
"creation_time": 1712821211.0175955,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399992.pt"
]
},
{
"steps": 1599951,
"file_path": "results/Huggy2/Huggy/Huggy-1599951.onnx",
"reward": 3.778593865900092,
"creation_time": 1712821451.150347,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599951.pt"
]
},
{
"steps": 1799949,
"file_path": "results/Huggy2/Huggy/Huggy-1799949.onnx",
"reward": 3.4867334388337046,
"creation_time": 1712821696.922758,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799949.pt"
]
},
{
"steps": 1999974,
"file_path": "results/Huggy2/Huggy/Huggy-1999974.onnx",
"reward": 3.3825531293605935,
"creation_time": 1712821941.485443,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999974.pt"
]
},
{
"steps": 2000028,
"file_path": "results/Huggy2/Huggy/Huggy-2000028.onnx",
"reward": 3.337542943159739,
"creation_time": 1712821941.6524994,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000028.pt"
]
}
],
"final_checkpoint": {
"steps": 2000028,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.337542943159739,
"creation_time": 1712821941.6524994,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000028.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.2.1+cu121"
}
}