{
"Huggy": {
"checkpoints": [
{
"steps": 199958,
"file_path": "results/Huggy/Huggy/Huggy-199958.onnx",
"reward": 3.358674817271047,
"creation_time": 1696628152.7608387,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199958.pt"
]
},
{
"steps": 399950,
"file_path": "results/Huggy/Huggy/Huggy-399950.onnx",
"reward": 3.781089071380465,
"creation_time": 1696628423.5496864,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399950.pt"
]
},
{
"steps": 599911,
"file_path": "results/Huggy/Huggy/Huggy-599911.onnx",
"reward": 3.795554707127233,
"creation_time": 1696628701.3600984,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599911.pt"
]
},
{
"steps": 799987,
"file_path": "results/Huggy/Huggy/Huggy-799987.onnx",
"reward": 3.7080858802534844,
"creation_time": 1696628983.0244896,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799987.pt"
]
},
{
"steps": 999958,
"file_path": "results/Huggy/Huggy/Huggy-999958.onnx",
"reward": 3.604090537171106,
"creation_time": 1696629282.1169345,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999958.pt"
]
},
{
"steps": 1199978,
"file_path": "results/Huggy/Huggy/Huggy-1199978.onnx",
"reward": 4.079902846079606,
"creation_time": 1696629572.1023352,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199978.pt"
]
},
{
"steps": 1399922,
"file_path": "results/Huggy/Huggy/Huggy-1399922.onnx",
"reward": 3.4229391564925513,
"creation_time": 1696629867.6932707,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399922.pt"
]
},
{
"steps": 1599425,
"file_path": "results/Huggy/Huggy/Huggy-1599425.onnx",
"reward": 3.6698979787337476,
"creation_time": 1696630161.6750422,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599425.pt"
]
},
{
"steps": 1799906,
"file_path": "results/Huggy/Huggy/Huggy-1799906.onnx",
"reward": 3.8537705120493153,
"creation_time": 1696630457.1328707,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799906.pt"
]
},
{
"steps": 1999974,
"file_path": "results/Huggy/Huggy/Huggy-1999974.onnx",
"reward": 4.0759181289349575,
"creation_time": 1696630753.6559055,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999974.pt"
]
},
{
"steps": 2000016,
"file_path": "results/Huggy/Huggy/Huggy-2000016.onnx",
"reward": 4.0478626648585,
"creation_time": 1696630753.8235412,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000016.pt"
]
}
],
"final_checkpoint": {
"steps": 2000016,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.0478626648585,
"creation_time": 1696630753.8235412,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000016.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "2.0.1+cu118"
}
}