{
"Huggy": {
"checkpoints": [
{
"steps": 199939,
"file_path": "results/Huggy2/Huggy/Huggy-199939.onnx",
"reward": 3.411206789314747,
"creation_time": 1715267864.4508812,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199939.pt"
]
},
{
"steps": 399941,
"file_path": "results/Huggy2/Huggy/Huggy-399941.onnx",
"reward": 4.111039072275162,
"creation_time": 1715268283.6898525,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399941.pt"
]
},
{
"steps": 599969,
"file_path": "results/Huggy2/Huggy/Huggy-599969.onnx",
"reward": 3.414448412025676,
"creation_time": 1715268695.3883557,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599969.pt"
]
},
{
"steps": 799855,
"file_path": "results/Huggy2/Huggy/Huggy-799855.onnx",
"reward": 3.7491625401208983,
"creation_time": 1715269088.4460485,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799855.pt"
]
},
{
"steps": 999888,
"file_path": "results/Huggy2/Huggy/Huggy-999888.onnx",
"reward": 3.762588985619091,
"creation_time": 1715269503.6658692,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999888.pt"
]
},
{
"steps": 1199943,
"file_path": "results/Huggy2/Huggy/Huggy-1199943.onnx",
"reward": 3.7349914026757083,
"creation_time": 1715269910.4794514,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199943.pt"
]
},
{
"steps": 1399963,
"file_path": "results/Huggy2/Huggy/Huggy-1399963.onnx",
"reward": 3.9208652756430884,
"creation_time": 1715270323.1387546,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399963.pt"
]
},
{
"steps": 1599992,
"file_path": "results/Huggy2/Huggy/Huggy-1599992.onnx",
"reward": 3.7720344534160892,
"creation_time": 1715270721.431923,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599992.pt"
]
},
{
"steps": 1799915,
"file_path": "results/Huggy2/Huggy/Huggy-1799915.onnx",
"reward": 3.7873576585993622,
"creation_time": 1715271133.6689396,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799915.pt"
]
},
{
"steps": 1999991,
"file_path": "results/Huggy2/Huggy/Huggy-1999991.onnx",
"reward": 4.021707257916851,
"creation_time": 1715271540.2846758,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999991.pt"
]
},
{
"steps": 2000104,
"file_path": "results/Huggy2/Huggy/Huggy-2000104.onnx",
"reward": 4.068958426278735,
"creation_time": 1715271540.423368,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000104.pt"
]
}
],
"final_checkpoint": {
"steps": 2000104,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 4.068958426278735,
"creation_time": 1715271540.423368,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000104.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.2.1+cu121"
}
}