{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199789,
                "file_path": "results/Huggy/Huggy/Huggy-199789.onnx",
                "reward": 3.5766633367165923,
                "creation_time": 1690504849.1895337,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199789.pt"
                ]
            },
            {
                "steps": 399935,
                "file_path": "results/Huggy/Huggy/Huggy-399935.onnx",
                "reward": 3.8437008439604914,
                "creation_time": 1690505102.9140701,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399935.pt"
                ]
            },
            {
                "steps": 599947,
                "file_path": "results/Huggy/Huggy/Huggy-599947.onnx",
                "reward": 4.063943310787804,
                "creation_time": 1690505360.1682923,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599947.pt"
                ]
            },
            {
                "steps": 799862,
                "file_path": "results/Huggy/Huggy/Huggy-799862.onnx",
                "reward": 3.7023853978371237,
                "creation_time": 1690505612.1936228,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799862.pt"
                ]
            },
            {
                "steps": 999988,
                "file_path": "results/Huggy/Huggy/Huggy-999988.onnx",
                "reward": 3.9478677867187395,
                "creation_time": 1690505874.205001,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999988.pt"
                ]
            },
            {
                "steps": 1199973,
                "file_path": "results/Huggy/Huggy/Huggy-1199973.onnx",
                "reward": 4.022717007181861,
                "creation_time": 1690506136.869432,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199973.pt"
                ]
            },
            {
                "steps": 1399990,
                "file_path": "results/Huggy/Huggy/Huggy-1399990.onnx",
                "reward": 3.9826608254359317,
                "creation_time": 1690506403.3707213,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399990.pt"
                ]
            },
            {
                "steps": 1599984,
                "file_path": "results/Huggy/Huggy/Huggy-1599984.onnx",
                "reward": 3.9318766422099896,
                "creation_time": 1690506667.2129192,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599984.pt"
                ]
            },
            {
                "steps": 1799958,
                "file_path": "results/Huggy/Huggy/Huggy-1799958.onnx",
                "reward": 3.8804682791233063,
                "creation_time": 1690506934.8809643,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799958.pt"
                ]
            },
            {
                "steps": 1999931,
                "file_path": "results/Huggy/Huggy/Huggy-1999931.onnx",
                "reward": 3.884384616972908,
                "creation_time": 1690507201.2892509,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999931.pt"
                ]
            },
            {
                "steps": 2000018,
                "file_path": "results/Huggy/Huggy/Huggy-2000018.onnx",
                "reward": 3.9237867519259453,
                "creation_time": 1690507201.4112325,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000018.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000018,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.9237867519259453,
            "creation_time": 1690507201.4112325,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000018.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}