ppo-Huggy/run_logs/training_status.json
{
"Huggy": {
"checkpoints": [
{
"steps": 199818,
"file_path": "results/Huggy/Huggy/Huggy-199818.onnx",
"reward": 3.2423504770748197,
"creation_time": 1672548461.758425,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199818.pt"
]
},
{
"steps": 399880,
"file_path": "results/Huggy/Huggy/Huggy-399880.onnx",
"reward": 4.143372137591524,
"creation_time": 1672548676.3751013,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399880.pt"
]
},
{
"steps": 599998,
"file_path": "results/Huggy/Huggy/Huggy-599998.onnx",
"reward": 3.538599117235704,
"creation_time": 1672548894.3285627,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599998.pt"
]
},
{
"steps": 799947,
"file_path": "results/Huggy/Huggy/Huggy-799947.onnx",
"reward": 4.266170076057736,
"creation_time": 1672549110.0617948,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799947.pt"
]
},
{
"steps": 999947,
"file_path": "results/Huggy/Huggy/Huggy-999947.onnx",
"reward": 3.956572914368486,
"creation_time": 1672549327.5403247,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999947.pt"
]
},
{
"steps": 1199951,
"file_path": "results/Huggy/Huggy/Huggy-1199951.onnx",
"reward": 3.794895522651218,
"creation_time": 1672549545.7745695,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199951.pt"
]
},
{
"steps": 1399982,
"file_path": "results/Huggy/Huggy/Huggy-1399982.onnx",
"reward": 3.6017089635133743,
"creation_time": 1672549764.067486,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399982.pt"
]
},
{
"steps": 1599995,
"file_path": "results/Huggy/Huggy/Huggy-1599995.onnx",
"reward": 3.918971495860714,
"creation_time": 1672549980.7078393,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599995.pt"
]
},
{
"steps": 1799972,
"file_path": "results/Huggy/Huggy/Huggy-1799972.onnx",
"reward": 4.077616895301433,
"creation_time": 1672550200.4105864,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799972.pt"
]
},
{
"steps": 1999995,
"file_path": "results/Huggy/Huggy/Huggy-1999995.onnx",
"reward": 3.946217917229818,
"creation_time": 1672550421.4802575,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999995.pt"
]
},
{
"steps": 2000057,
"file_path": "results/Huggy/Huggy/Huggy-2000057.onnx",
"reward": 3.948967694595296,
"creation_time": 1672550421.600805,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000057.pt"
]
}
],
"final_checkpoint": {
"steps": 2000057,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.948967694595296,
"creation_time": 1672550421.600805,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000057.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}
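
A quick way to inspect this checkpoint history is to load the JSON and print one line per checkpoint. The sketch below is illustrative only: the relative path run_logs/training_status.json and the behavior name "Huggy" are taken from the file above, while everything else (the script itself, the output format) is an assumption, not part of the original repo. Note that "auxillary_file_paths" is spelled exactly as it appears in the file, since that is the field name ML-Agents writes.

import json

# Minimal sketch: summarize the ML-Agents checkpoint history shown above.
# Assumption: the file is readable at this relative path; adjust as needed.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

huggy = status["Huggy"]

# One line per intermediate checkpoint: step count, mean reward, ONNX path.
for ckpt in huggy["checkpoints"]:
    # "auxillary_file_paths" (sic) holds the matching .pt checkpoint(s).
    aux = ", ".join(ckpt["auxillary_file_paths"])
    print(f"steps={ckpt['steps']:>8}  reward={ckpt['reward']:.3f}  "
          f"onnx={ckpt['file_path']}  aux={aux}")

final = huggy["final_checkpoint"]
print(f"final: steps={final['steps']}  reward={final['reward']:.3f}  "
      f"onnx={final['file_path']}")

Run against this file, the loop would print eleven checkpoint lines (roughly every 200k steps up to 2000057) followed by the final_checkpoint summary, whose reward matches the last intermediate checkpoint.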