{
"Huggy": {
"checkpoints": [
{
"steps": 199805,
"file_path": "results/Huggy/Huggy/Huggy-199805.onnx",
"reward": 3.238604138447688,
"creation_time": 1676374501.7365344,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199805.pt"
]
},
{
"steps": 399934,
"file_path": "results/Huggy/Huggy/Huggy-399934.onnx",
"reward": 3.62714213683055,
"creation_time": 1676374747.8591182,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399934.pt"
]
},
{
"steps": 599997,
"file_path": "results/Huggy/Huggy/Huggy-599997.onnx",
"reward": 3.6023425641267197,
"creation_time": 1676374996.9842293,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599997.pt"
]
},
{
"steps": 799922,
"file_path": "results/Huggy/Huggy/Huggy-799922.onnx",
"reward": 3.7493293426253578,
"creation_time": 1676375241.98066,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799922.pt"
]
},
{
"steps": 999950,
"file_path": "results/Huggy/Huggy/Huggy-999950.onnx",
"reward": 3.872001526817199,
"creation_time": 1676375487.7368252,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999950.pt"
]
},
{
"steps": 1199975,
"file_path": "results/Huggy/Huggy/Huggy-1199975.onnx",
"reward": 3.5492294422678046,
"creation_time": 1676375741.9345806,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199975.pt"
]
},
{
"steps": 1399832,
"file_path": "results/Huggy/Huggy/Huggy-1399832.onnx",
"reward": 3.9339053443784993,
"creation_time": 1676375995.9155202,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399832.pt"
]
},
{
"steps": 1599927,
"file_path": "results/Huggy/Huggy/Huggy-1599927.onnx",
"reward": 3.975197356322716,
"creation_time": 1676376249.1116974,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599927.pt"
]
},
{
"steps": 1799425,
"file_path": "results/Huggy/Huggy/Huggy-1799425.onnx",
"reward": 3.2001154117095165,
"creation_time": 1676376497.541339,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799425.pt"
]
},
{
"steps": 1999532,
"file_path": "results/Huggy/Huggy/Huggy-1999532.onnx",
"reward": 3.5323115156756506,
"creation_time": 1676376743.9782884,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999532.pt"
]
},
{
"steps": 2000282,
"file_path": "results/Huggy/Huggy/Huggy-2000282.onnx",
"reward": 3.2023692601605465,
"creation_time": 1676376744.1273475,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000282.pt"
]
}
],
"final_checkpoint": {
"steps": 2000282,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.2023692601605465,
"creation_time": 1676376744.1273475,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000282.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}