{
"Huggy": {
"checkpoints": [
{
"steps": 199733,
"file_path": "results/Huggy2/Huggy/Huggy-199733.onnx",
"reward": 3.316865605823064,
"creation_time": 1716810033.3032455,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199733.pt"
]
},
{
"steps": 399977,
"file_path": "results/Huggy2/Huggy/Huggy-399977.onnx",
"reward": 3.7552255428754364,
"creation_time": 1716810278.9745216,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399977.pt"
]
},
{
"steps": 599965,
"file_path": "results/Huggy2/Huggy/Huggy-599965.onnx",
"reward": 4.0626235919840195,
"creation_time": 1716810527.9512205,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599965.pt"
]
},
{
"steps": 799963,
"file_path": "results/Huggy2/Huggy/Huggy-799963.onnx",
"reward": 3.9312895921248834,
"creation_time": 1716810775.601885,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799963.pt"
]
},
{
"steps": 999938,
"file_path": "results/Huggy2/Huggy/Huggy-999938.onnx",
"reward": 4.196575729025377,
"creation_time": 1716811026.7522395,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999938.pt"
]
},
{
"steps": 1199959,
"file_path": "results/Huggy2/Huggy/Huggy-1199959.onnx",
"reward": 3.8198605676492057,
"creation_time": 1716811279.2981815,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199959.pt"
]
},
{
"steps": 1399966,
"file_path": "results/Huggy2/Huggy/Huggy-1399966.onnx",
"reward": 3.5526450148650577,
"creation_time": 1716811531.295834,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399966.pt"
]
},
{
"steps": 1599965,
"file_path": "results/Huggy2/Huggy/Huggy-1599965.onnx",
"reward": 3.890235466856352,
"creation_time": 1716811791.8632898,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599965.pt"
]
},
{
"steps": 1799996,
"file_path": "results/Huggy2/Huggy/Huggy-1799996.onnx",
"reward": 3.8379337820224464,
"creation_time": 1716812053.145328,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799996.pt"
]
},
{
"steps": 1999936,
"file_path": "results/Huggy2/Huggy/Huggy-1999936.onnx",
"reward": 3.5590327884021558,
"creation_time": 1716812315.1030514,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999936.pt"
]
},
{
"steps": 2000043,
"file_path": "results/Huggy2/Huggy/Huggy-2000043.onnx",
"reward": 3.6360534857480955,
"creation_time": 1716812315.227632,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000043.pt"
]
}
],
"final_checkpoint": {
"steps": 2000043,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.6360534857480955,
"creation_time": 1716812315.227632,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000043.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.0+cu121"
}
}