{
"Huggy": {
"checkpoints": [
{
"steps": 199771,
"file_path": "results/Huggy/Huggy/Huggy-199771.onnx",
"reward": 3.249272190965712,
"creation_time": 1689643288.763325,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199771.pt"
]
},
{
"steps": 399586,
"file_path": "results/Huggy/Huggy/Huggy-399586.onnx",
"reward": 3.717292259951107,
"creation_time": 1689643528.6073668,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399586.pt"
]
},
{
"steps": 599959,
"file_path": "results/Huggy/Huggy/Huggy-599959.onnx",
"reward": 4.027742380445654,
"creation_time": 1689643775.2228966,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599959.pt"
]
},
{
"steps": 799785,
"file_path": "results/Huggy/Huggy/Huggy-799785.onnx",
"reward": 3.6680867933390435,
"creation_time": 1689644017.1709197,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799785.pt"
]
},
{
"steps": 999936,
"file_path": "results/Huggy/Huggy/Huggy-999936.onnx",
"reward": 3.7321487506230673,
"creation_time": 1689644263.5840302,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999936.pt"
]
},
{
"steps": 1199932,
"file_path": "results/Huggy/Huggy/Huggy-1199932.onnx",
"reward": 3.5409817561507224,
"creation_time": 1689644507.7319136,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199932.pt"
]
},
{
"steps": 1399933,
"file_path": "results/Huggy/Huggy/Huggy-1399933.onnx",
"reward": 3.6178078537610308,
"creation_time": 1689644751.3224814,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399933.pt"
]
},
{
"steps": 1599969,
"file_path": "results/Huggy/Huggy/Huggy-1599969.onnx",
"reward": 3.596569794157277,
"creation_time": 1689644996.8028347,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599969.pt"
]
},
{
"steps": 1799972,
"file_path": "results/Huggy/Huggy/Huggy-1799972.onnx",
"reward": 3.3578535294046206,
"creation_time": 1689645242.256509,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799972.pt"
]
},
{
"steps": 1999746,
"file_path": "results/Huggy/Huggy/Huggy-1999746.onnx",
"reward": 3.3434739685058594,
"creation_time": 1689645482.8724375,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999746.pt"
]
},
{
"steps": 2000496,
"file_path": "results/Huggy/Huggy/Huggy-2000496.onnx",
"reward": 3.304727909580761,
"creation_time": 1689645483.0190377,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000496.pt"
]
}
],
"final_checkpoint": {
"steps": 2000496,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.304727909580761,
"creation_time": 1689645483.0190377,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000496.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}