{
"Huggy": {
"checkpoints": [
{
"steps": 199960,
"file_path": "results/Huggy/Huggy/Huggy-199960.onnx",
"reward": 3.2966313903981987,
"creation_time": 1690006025.8850784,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199960.pt"
]
},
{
"steps": 399926,
"file_path": "results/Huggy/Huggy/Huggy-399926.onnx",
"reward": 4.013066426400216,
"creation_time": 1690006276.9550974,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399926.pt"
]
},
{
"steps": 599927,
"file_path": "results/Huggy/Huggy/Huggy-599927.onnx",
"reward": 3.87822039579523,
"creation_time": 1690006533.8569877,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599927.pt"
]
},
{
"steps": 799945,
"file_path": "results/Huggy/Huggy/Huggy-799945.onnx",
"reward": 3.794900200329721,
"creation_time": 1690006789.197028,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799945.pt"
]
},
{
"steps": 999846,
"file_path": "results/Huggy/Huggy/Huggy-999846.onnx",
"reward": 3.734510096070034,
"creation_time": 1690007051.691609,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999846.pt"
]
},
{
"steps": 1199800,
"file_path": "results/Huggy/Huggy/Huggy-1199800.onnx",
"reward": 3.8314454400291047,
"creation_time": 1690007310.79171,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199800.pt"
]
},
{
"steps": 1399969,
"file_path": "results/Huggy/Huggy/Huggy-1399969.onnx",
"reward": 3.800573952496052,
"creation_time": 1690007569.5081604,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399969.pt"
]
},
{
"steps": 1599877,
"file_path": "results/Huggy/Huggy/Huggy-1599877.onnx",
"reward": 3.7099675318438137,
"creation_time": 1690007822.1303475,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599877.pt"
]
},
{
"steps": 1799951,
"file_path": "results/Huggy/Huggy/Huggy-1799951.onnx",
"reward": 3.4244969353956334,
"creation_time": 1690008076.687639,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799951.pt"
]
},
{
"steps": 1999510,
"file_path": "results/Huggy/Huggy/Huggy-1999510.onnx",
"reward": 3.8742238724673235,
"creation_time": 1690008332.4327664,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999510.pt"
]
},
{
"steps": 2000260,
"file_path": "results/Huggy/Huggy/Huggy-2000260.onnx",
"reward": 3.6085324926035747,
"creation_time": 1690008332.5824215,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000260.pt"
]
}
],
"final_checkpoint": {
"steps": 2000260,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.6085324926035747,
"creation_time": 1690008332.5824215,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000260.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}