{
"Huggy": {
"checkpoints": [
{
"steps": 199836,
"file_path": "results/Huggy2/Huggy/Huggy-199836.onnx",
"reward": 3.218039128317762,
"creation_time": 1716759092.1414645,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199836.pt"
]
},
{
"steps": 399985,
"file_path": "results/Huggy2/Huggy/Huggy-399985.onnx",
"reward": 3.865598103031516,
"creation_time": 1716759156.853558,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399985.pt"
]
},
{
"steps": 599411,
"file_path": "results/Huggy2/Huggy/Huggy-599411.onnx",
"reward": 3.4007410626662407,
"creation_time": 1716759222.6041431,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599411.pt"
]
},
{
"steps": 799888,
"file_path": "results/Huggy2/Huggy/Huggy-799888.onnx",
"reward": 3.9123667715763557,
"creation_time": 1716759289.2506104,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799888.pt"
]
},
{
"steps": 999951,
"file_path": "results/Huggy2/Huggy/Huggy-999951.onnx",
"reward": 3.736406514342402,
"creation_time": 1716759356.922115,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999951.pt"
]
},
{
"steps": 1199963,
"file_path": "results/Huggy2/Huggy/Huggy-1199963.onnx",
"reward": 4.09952839683084,
"creation_time": 1716759425.8138263,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199963.pt"
]
},
{
"steps": 1399901,
"file_path": "results/Huggy2/Huggy/Huggy-1399901.onnx",
"reward": 2.245363473892212,
"creation_time": 1716759493.8547945,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399901.pt"
]
},
{
"steps": 1599875,
"file_path": "results/Huggy2/Huggy/Huggy-1599875.onnx",
"reward": 3.6635785694340712,
"creation_time": 1716759560.2143893,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599875.pt"
]
},
{
"steps": 1799988,
"file_path": "results/Huggy2/Huggy/Huggy-1799988.onnx",
"reward": 3.84132134424497,
"creation_time": 1716759630.183115,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799988.pt"
]
},
{
"steps": 1999992,
"file_path": "results/Huggy2/Huggy/Huggy-1999992.onnx",
"reward": 3.35029007991155,
"creation_time": 1716759700.5345306,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999992.pt"
]
},
{
"steps": 2000130,
"file_path": "results/Huggy2/Huggy/Huggy-2000130.onnx",
"reward": 3.5484132216526914,
"creation_time": 1716759700.5741017,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000130.pt"
]
}
],
"final_checkpoint": {
"steps": 2000130,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.5484132216526914,
"creation_time": 1716759700.5741017,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000130.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.2.1+cu121"
}
}