{
"Huggy": {
"checkpoints": [
{
"steps": 2712,
"file_path": "results/Huggy/Huggy/Huggy-2712.onnx",
"reward": 1.600289832461964,
"creation_time": 1683703507.5242732,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2712.pt"
]
},
{
"steps": 199666,
"file_path": "results/Huggy/Huggy/Huggy-199666.onnx",
"reward": 3.6159734482114967,
"creation_time": 1683703757.9028628,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199666.pt"
]
},
{
"steps": 399977,
"file_path": "results/Huggy/Huggy/Huggy-399977.onnx",
"reward": 3.7865134355349417,
"creation_time": 1683704005.287616,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399977.pt"
]
},
{
"steps": 599992,
"file_path": "results/Huggy/Huggy/Huggy-599992.onnx",
"reward": 3.7459407579767836,
"creation_time": 1683704253.9774187,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599992.pt"
]
},
{
"steps": 799899,
"file_path": "results/Huggy/Huggy/Huggy-799899.onnx",
"reward": 3.665117255225778,
"creation_time": 1683704502.8636615,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799899.pt"
]
},
{
"steps": 999917,
"file_path": "results/Huggy/Huggy/Huggy-999917.onnx",
"reward": 3.819356248695023,
"creation_time": 1683704756.9437475,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999917.pt"
]
},
{
"steps": 1199978,
"file_path": "results/Huggy/Huggy/Huggy-1199978.onnx",
"reward": 4.248267750848424,
"creation_time": 1683705007.5255609,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199978.pt"
]
},
{
"steps": 1399993,
"file_path": "results/Huggy/Huggy/Huggy-1399993.onnx",
"reward": 4.104825229771369,
"creation_time": 1683705256.7284565,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399993.pt"
]
},
{
"steps": 1599922,
"file_path": "results/Huggy/Huggy/Huggy-1599922.onnx",
"reward": 3.656624405444423,
"creation_time": 1683705513.3131442,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599922.pt"
]
},
{
"steps": 1799951,
"file_path": "results/Huggy/Huggy/Huggy-1799951.onnx",
"reward": 3.8541754213876502,
"creation_time": 1683705764.356705,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799951.pt"
]
},
{
"steps": 1999922,
"file_path": "results/Huggy/Huggy/Huggy-1999922.onnx",
"reward": 3.4584899097681046,
"creation_time": 1683706018.3630207,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999922.pt"
]
},
{
"steps": 2000011,
"file_path": "results/Huggy/Huggy/Huggy-2000011.onnx",
"reward": 3.6190185679329767,
"creation_time": 1683706018.5614033,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000011.pt"
]
}
],
"final_checkpoint": {
"steps": 2000011,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.6190185679329767,
"creation_time": 1683706018.5614033,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000011.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}