{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199758,
        "file_path": "results/Huggy/Huggy/Huggy-199758.onnx",
        "reward": 3.525910476843516,
        "creation_time": 1693804460.3714733,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-199758.pt"
        ]
      },
      {
        "steps": 399752,
        "file_path": "results/Huggy/Huggy/Huggy-399752.onnx",
        "reward": 4.2051286419232685,
        "creation_time": 1693804715.7208538,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-399752.pt"
        ]
      },
      {
        "steps": 599955,
        "file_path": "results/Huggy/Huggy/Huggy-599955.onnx",
        "reward": 3.2335087060928345,
        "creation_time": 1693804975.9790857,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-599955.pt"
        ]
      },
      {
        "steps": 799938,
        "file_path": "results/Huggy/Huggy/Huggy-799938.onnx",
        "reward": 3.7031381379413064,
        "creation_time": 1693805234.5739508,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-799938.pt"
        ]
      },
      {
        "steps": 999985,
        "file_path": "results/Huggy/Huggy/Huggy-999985.onnx",
        "reward": 3.7880079952939862,
        "creation_time": 1693805497.0286887,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-999985.pt"
        ]
      },
      {
        "steps": 1199997,
        "file_path": "results/Huggy/Huggy/Huggy-1199997.onnx",
        "reward": 3.4735354555064233,
        "creation_time": 1693805762.1516511,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1199997.pt"
        ]
      },
      {
        "steps": 1399623,
        "file_path": "results/Huggy/Huggy/Huggy-1399623.onnx",
        "reward": 3.8575507592071188,
        "creation_time": 1693806022.6128705,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1399623.pt"
        ]
      },
      {
        "steps": 1599975,
        "file_path": "results/Huggy/Huggy/Huggy-1599975.onnx",
        "reward": 3.694617099643494,
        "creation_time": 1693806290.123151,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1599975.pt"
        ]
      },
      {
        "steps": 1799933,
        "file_path": "results/Huggy/Huggy/Huggy-1799933.onnx",
        "reward": 4.285590188604005,
        "creation_time": 1693806556.2446113,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1799933.pt"
        ]
      },
      {
        "steps": 1999957,
        "file_path": "results/Huggy/Huggy/Huggy-1999957.onnx",
        "reward": 5.008019129435222,
        "creation_time": 1693806823.4370983,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1999957.pt"
        ]
      },
      {
        "steps": 2000019,
        "file_path": "results/Huggy/Huggy/Huggy-2000019.onnx",
        "reward": 4.476809978485107,
        "creation_time": 1693806823.6427133,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-2000019.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000019,
      "file_path": "results/Huggy/Huggy.onnx",
      "reward": 4.476809978485107,
      "creation_time": 1693806823.6427133,
      "auxillary_file_paths": [
        "results/Huggy/Huggy/Huggy-2000019.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "0.31.0.dev0",
    "torch_version": "1.11.0+cu102"
  }
}