{
"Huggy": {
"checkpoints": [
{
"steps": 199992,
"file_path": "results/Huggy/Huggy/Huggy-199992.onnx",
"reward": 3.536755780704686,
"creation_time": 1676735558.5410933,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199992.pt"
]
},
{
"steps": 399928,
"file_path": "results/Huggy/Huggy/Huggy-399928.onnx",
"reward": 3.708587210911971,
"creation_time": 1676735787.5162103,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399928.pt"
]
},
{
"steps": 599986,
"file_path": "results/Huggy/Huggy/Huggy-599986.onnx",
"reward": 3.889959140827781,
"creation_time": 1676736020.270726,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599986.pt"
]
},
{
"steps": 799974,
"file_path": "results/Huggy/Huggy/Huggy-799974.onnx",
"reward": 3.7712752670049667,
"creation_time": 1676736249.2284813,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799974.pt"
]
},
{
"steps": 999923,
"file_path": "results/Huggy/Huggy/Huggy-999923.onnx",
"reward": 3.7874865409369782,
"creation_time": 1676736479.742301,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999923.pt"
]
},
{
"steps": 1199971,
"file_path": "results/Huggy/Huggy/Huggy-1199971.onnx",
"reward": 3.662391553322474,
"creation_time": 1676736711.8386776,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199971.pt"
]
},
{
"steps": 1399966,
"file_path": "results/Huggy/Huggy/Huggy-1399966.onnx",
"reward": 3.6066407390062323,
"creation_time": 1676736943.8774526,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399966.pt"
]
},
{
"steps": 1599942,
"file_path": "results/Huggy/Huggy/Huggy-1599942.onnx",
"reward": 3.717764865410955,
"creation_time": 1676737175.6217291,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599942.pt"
]
},
{
"steps": 1799789,
"file_path": "results/Huggy/Huggy/Huggy-1799789.onnx",
"reward": 3.6203896469540067,
"creation_time": 1676737405.953133,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799789.pt"
]
},
{
"steps": 1999988,
"file_path": "results/Huggy/Huggy/Huggy-1999988.onnx",
"reward": null,
"creation_time": 1676737635.4957187,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999988.pt"
]
},
{
"steps": 2000086,
"file_path": "results/Huggy/Huggy/Huggy-2000086.onnx",
"reward": 6.741436004638672,
"creation_time": 1676737635.6128325,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000086.pt"
]
}
],
"final_checkpoint": {
"steps": 2000086,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 6.741436004638672,
"creation_time": 1676737635.6128325,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000086.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}