Huggy-ppo / run_logs / training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199776,
                "file_path": "results/Huggy/Huggy/Huggy-199776.onnx",
                "reward": 3.627549344034337,
                "creation_time": 1703376916.6953769,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199776.pt"
                ]
            },
            {
                "steps": 399960,
                "file_path": "results/Huggy/Huggy/Huggy-399960.onnx",
                "reward": 3.814566563337277,
                "creation_time": 1703377155.7970352,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399960.pt"
                ]
            },
            {
                "steps": 599985,
                "file_path": "results/Huggy/Huggy/Huggy-599985.onnx",
                "reward": 4.047857644466253,
                "creation_time": 1703377399.1576202,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599985.pt"
                ]
            },
            {
                "steps": 799950,
                "file_path": "results/Huggy/Huggy/Huggy-799950.onnx",
                "reward": 4.159766858025473,
                "creation_time": 1703377644.244294,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799950.pt"
                ]
            },
            {
                "steps": 999935,
                "file_path": "results/Huggy/Huggy/Huggy-999935.onnx",
                "reward": 4.034599445660909,
                "creation_time": 1703377891.1485274,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999935.pt"
                ]
            },
            {
                "steps": 1199989,
                "file_path": "results/Huggy/Huggy/Huggy-1199989.onnx",
                "reward": 4.2092928368112315,
                "creation_time": 1703378144.6542845,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199989.pt"
                ]
            },
            {
                "steps": 1399947,
                "file_path": "results/Huggy/Huggy/Huggy-1399947.onnx",
                "reward": 3.7161702315012612,
                "creation_time": 1703378400.765856,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399947.pt"
                ]
            },
            {
                "steps": 1599944,
                "file_path": "results/Huggy/Huggy/Huggy-1599944.onnx",
                "reward": 3.6196079110797448,
                "creation_time": 1703378650.4949431,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599944.pt"
                ]
            },
            {
                "steps": 1799950,
                "file_path": "results/Huggy/Huggy/Huggy-1799950.onnx",
                "reward": 3.530695963650942,
                "creation_time": 1703378903.2598248,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799950.pt"
                ]
            },
            {
                "steps": 1999820,
                "file_path": "results/Huggy/Huggy/Huggy-1999820.onnx",
                "reward": 3.1169851762907848,
                "creation_time": 1703379156.0652409,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999820.pt"
                ]
            },
            {
                "steps": 2000600,
                "file_path": "results/Huggy/Huggy/Huggy-2000600.onnx",
                "reward": 2.6987299248576164,
                "creation_time": 1703379156.2217307,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000600.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000600,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 2.6987299248576164,
            "creation_time": 1703379156.2217307,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000600.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "1.1.0.dev0",
        "torch_version": "2.1.2+cu121"
    }
}
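
This file is the checkpoint log that ML-Agents emits during training: each entry records the step count, the exported `.onnx` policy path, the mean reward at export time, a Unix creation timestamp, and the companion `.pt` file (the key spelling `auxillary_file_paths` is taken verbatim from the file above). A minimal sketch of how it could be inspected with only the Python standard library; the variable names and printed format are illustrative, not part of the repo:

```python
import json

# Assumes the file above has been saved locally as training_status.json.
with open("training_status.json") as f:
    status = json.load(f)

# Walk the checkpoint list and print step count, reward, and model path.
for ckpt in status["Huggy"]["checkpoints"]:
    print(f'{ckpt["steps"]:>9,d} steps  '
          f'reward {ckpt["reward"]:.3f}  '
          f'{ckpt["file_path"]}')

# Highest-reward checkpoint; note it need not be the final one
# (here the 1,199,989-step export outscores the 2,000,600-step final).
best = max(status["Huggy"]["checkpoints"], key=lambda c: c["reward"])
print("best:", best["file_path"], f'(reward {best["reward"]:.3f})')
```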