{
"Huggy": {
"checkpoints": [
{
"steps": 199722,
"file_path": "results/Huggy/Huggy/Huggy-199722.onnx",
"reward": 3.7218838669359684,
"creation_time": 1672849920.0685377,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199722.pt"
]
},
{
"steps": 399935,
"file_path": "results/Huggy/Huggy/Huggy-399935.onnx",
"reward": 3.7273190149239133,
"creation_time": 1672850145.3899176,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399935.pt"
]
},
{
"steps": 599896,
"file_path": "results/Huggy/Huggy/Huggy-599896.onnx",
"reward": 3.396654076235635,
"creation_time": 1672850371.5282366,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599896.pt"
]
},
{
"steps": 799980,
"file_path": "results/Huggy/Huggy/Huggy-799980.onnx",
"reward": 3.7965801257007525,
"creation_time": 1672850599.4035695,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799980.pt"
]
},
{
"steps": 999936,
"file_path": "results/Huggy/Huggy/Huggy-999936.onnx",
"reward": 3.748723717072071,
"creation_time": 1672850832.2016563,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999936.pt"
]
},
{
"steps": 1199937,
"file_path": "results/Huggy/Huggy/Huggy-1199937.onnx",
"reward": 3.7044762631632246,
"creation_time": 1672851064.3953645,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199937.pt"
]
},
{
"steps": 1399972,
"file_path": "results/Huggy/Huggy/Huggy-1399972.onnx",
"reward": 3.5798521861433983,
"creation_time": 1672851300.2731297,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399972.pt"
]
},
{
"steps": 1599997,
"file_path": "results/Huggy/Huggy/Huggy-1599997.onnx",
"reward": 3.9102162628234187,
"creation_time": 1672851534.5796177,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599997.pt"
]
},
{
"steps": 1799988,
"file_path": "results/Huggy/Huggy/Huggy-1799988.onnx",
"reward": 3.7684903995839156,
"creation_time": 1672851768.5764813,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799988.pt"
]
},
{
"steps": 1999981,
"file_path": "results/Huggy/Huggy/Huggy-1999981.onnx",
"reward": 4.092224320438173,
"creation_time": 1672851999.8939397,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999981.pt"
]
},
{
"steps": 2000100,
"file_path": "results/Huggy/Huggy/Huggy-2000100.onnx",
"reward": 4.139625830309732,
"creation_time": 1672852000.0235136,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000100.pt"
]
}
],
"final_checkpoint": {
"steps": 2000100,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.139625830309732,
"creation_time": 1672852000.0235136,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000100.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}