{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199882,
        "file_path": "results/Huggy/Huggy/Huggy-199882.onnx",
        "reward": 3.3491736430011385,
        "creation_time": 1679447602.429636,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-199882.pt"
        ]
      },
      {
        "steps": 399974,
        "file_path": "results/Huggy/Huggy/Huggy-399974.onnx",
        "reward": 3.4515503722688425,
        "creation_time": 1679447829.0633035,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-399974.pt"
        ]
      },
      {
        "steps": 599992,
        "file_path": "results/Huggy/Huggy/Huggy-599992.onnx",
        "reward": 3.346853326106894,
        "creation_time": 1679448057.8004756,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-599992.pt"
        ]
      },
      {
        "steps": 799499,
        "file_path": "results/Huggy/Huggy/Huggy-799499.onnx",
        "reward": 3.7093039997095287,
        "creation_time": 1679448285.5791736,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-799499.pt"
        ]
      },
      {
        "steps": 999958,
        "file_path": "results/Huggy/Huggy/Huggy-999958.onnx",
        "reward": 4.244661676982218,
        "creation_time": 1679448516.2584558,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-999958.pt"
        ]
      },
      {
        "steps": 1199759,
        "file_path": "results/Huggy/Huggy/Huggy-1199759.onnx",
        "reward": 3.7789275008771153,
        "creation_time": 1679448747.8750074,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1199759.pt"
        ]
      },
      {
        "steps": 1399999,
        "file_path": "results/Huggy/Huggy/Huggy-1399999.onnx",
        "reward": 4.127469171994719,
        "creation_time": 1679448978.2341619,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1399999.pt"
        ]
      },
      {
        "steps": 1599946,
        "file_path": "results/Huggy/Huggy/Huggy-1599946.onnx",
        "reward": 3.9372409546014033,
        "creation_time": 1679449211.8006325,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1599946.pt"
        ]
      },
      {
        "steps": 1799989,
        "file_path": "results/Huggy/Huggy/Huggy-1799989.onnx",
        "reward": 3.8666786805237874,
        "creation_time": 1679449444.7231915,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1799989.pt"
        ]
      },
      {
        "steps": 1999928,
        "file_path": "results/Huggy/Huggy/Huggy-1999928.onnx",
        "reward": 3.7269542747073703,
        "creation_time": 1679449678.1357133,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1999928.pt"
        ]
      },
      {
        "steps": 2000056,
        "file_path": "results/Huggy/Huggy/Huggy-2000056.onnx",
        "reward": 3.7658449496541704,
        "creation_time": 1679449678.2512925,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-2000056.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000056,
      "file_path": "results/Huggy/Huggy.onnx",
      "reward": 3.7658449496541704,
      "creation_time": 1679449678.2512925,
      "auxillary_file_paths": [
        "results/Huggy/Huggy/Huggy-2000056.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "0.31.0.dev0",
    "torch_version": "1.11.0+cu102"
  }
}