ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199470,
                "file_path": "results/Huggy/Huggy/Huggy-199470.onnx",
                "reward": 3.4462557484706244,
                "creation_time": 1692535666.835414,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199470.pt"
                ]
            },
            {
                "steps": 399909,
                "file_path": "results/Huggy/Huggy/Huggy-399909.onnx",
                "reward": 4.023666910195755,
                "creation_time": 1692535846.3391137,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399909.pt"
                ]
            },
            {
                "steps": 599789,
                "file_path": "results/Huggy/Huggy/Huggy-599789.onnx",
                "reward": 3.702234823128273,
                "creation_time": 1692536034.8979743,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599789.pt"
                ]
            },
            {
                "steps": 799901,
                "file_path": "results/Huggy/Huggy/Huggy-799901.onnx",
                "reward": 3.665125233786447,
                "creation_time": 1692536215.0328062,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799901.pt"
                ]
            },
            {
                "steps": 999996,
                "file_path": "results/Huggy/Huggy/Huggy-999996.onnx",
                "reward": 3.8022969864881957,
                "creation_time": 1692536405.2087488,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999996.pt"
                ]
            },
            {
                "steps": 1199986,
                "file_path": "results/Huggy/Huggy/Huggy-1199986.onnx",
                "reward": 3.5661451478063326,
                "creation_time": 1692536600.5311596,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199986.pt"
                ]
            },
            {
                "steps": 1399356,
                "file_path": "results/Huggy/Huggy/Huggy-1399356.onnx",
                "reward": 3.6471863985061646,
                "creation_time": 1692536794.4849894,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399356.pt"
                ]
            },
            {
                "steps": 1599970,
                "file_path": "results/Huggy/Huggy/Huggy-1599970.onnx",
                "reward": 3.9300963071288253,
                "creation_time": 1692536980.4421396,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599970.pt"
                ]
            },
            {
                "steps": 1799937,
                "file_path": "results/Huggy/Huggy/Huggy-1799937.onnx",
                "reward": 3.7117775127572834,
                "creation_time": 1692537169.704754,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799937.pt"
                ]
            },
            {
                "steps": 1999947,
                "file_path": "results/Huggy/Huggy/Huggy-1999947.onnx",
                "reward": 3.625914450486501,
                "creation_time": 1692537354.8824446,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999947.pt"
                ]
            },
            {
                "steps": 2000027,
                "file_path": "results/Huggy/Huggy/Huggy-2000027.onnx",
                "reward": 3.7254328766176776,
                "creation_time": 1692537354.9977505,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000027.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000027,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.7254328766176776,
            "creation_time": 1692537354.9977505,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000027.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}
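
A minimal sketch of reading this log, assuming the file is saved locally as run_logs/training_status.json (the path and the printed summary are illustrative, not part of the original log; only the keys shown in the JSON above are relied on):

import json

# Path is an assumption; point it at wherever this file lives locally.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

huggy = status["Huggy"]

# List each recorded checkpoint as (steps, mean reward, exported ONNX path).
for ckpt in huggy["checkpoints"]:
    print(f'{ckpt["steps"]:>8}  reward={ckpt["reward"]:.3f}  {ckpt["file_path"]}')

# The final checkpoint mirrors the last entry but points at the exported model.
final = huggy["final_checkpoint"]
print(f'final: {final["steps"]} steps, reward={final["reward"]:.3f}, {final["file_path"]}')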