ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199777,
                "file_path": "results/Huggy/Huggy/Huggy-199777.onnx",
                "reward": 3.1366176614685664,
                "creation_time": 1673192916.668884,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199777.pt"
                ]
            },
            {
                "steps": 399272,
                "file_path": "results/Huggy/Huggy/Huggy-399272.onnx",
                "reward": 4.365651965141296,
                "creation_time": 1673193129.87156,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399272.pt"
                ]
            },
            {
                "steps": 599314,
                "file_path": "results/Huggy/Huggy/Huggy-599314.onnx",
                "reward": 4.002593696117401,
                "creation_time": 1673193346.3906832,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599314.pt"
                ]
            },
            {
                "steps": 799935,
                "file_path": "results/Huggy/Huggy/Huggy-799935.onnx",
                "reward": 3.7478506851654787,
                "creation_time": 1673193558.5638788,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799935.pt"
                ]
            },
            {
                "steps": 999932,
                "file_path": "results/Huggy/Huggy/Huggy-999932.onnx",
                "reward": 3.3865589114927475,
                "creation_time": 1673193774.6032522,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999932.pt"
                ]
            },
            {
                "steps": 1199908,
                "file_path": "results/Huggy/Huggy/Huggy-1199908.onnx",
                "reward": 2.782830693000971,
                "creation_time": 1673193990.1369867,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199908.pt"
                ]
            },
            {
                "steps": 1399603,
                "file_path": "results/Huggy/Huggy/Huggy-1399603.onnx",
                "reward": 3.780600820620035,
                "creation_time": 1673194201.1775837,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399603.pt"
                ]
            },
            {
                "steps": 1599760,
                "file_path": "results/Huggy/Huggy/Huggy-1599760.onnx",
                "reward": 3.991335240277377,
                "creation_time": 1673194414.998465,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599760.pt"
                ]
            },
            {
                "steps": 1799990,
                "file_path": "results/Huggy/Huggy/Huggy-1799990.onnx",
                "reward": 3.9926821291446686,
                "creation_time": 1673194630.105935,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799990.pt"
                ]
            },
            {
                "steps": 1999985,
                "file_path": "results/Huggy/Huggy/Huggy-1999985.onnx",
                "reward": 3.474427350929805,
                "creation_time": 1673194845.6489882,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999985.pt"
                ]
            },
            {
                "steps": 2000071,
                "file_path": "results/Huggy/Huggy/Huggy-2000071.onnx",
                "reward": 3.6255212386449176,
                "creation_time": 1673194845.7680354,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000071.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000071,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.6255212386449176,
            "creation_time": 1673194845.7680354,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000071.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.29.0.dev0",
        "torch_version": "1.8.1+cu102"
    }
}
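
For reference, a minimal Python sketch of how a file like this can be consumed. The path `run_logs/training_status.json` is an assumption based on the repo layout above; the keys (`checkpoints`, `final_checkpoint`, and the `auxillary_file_paths` spelling) match the JSON shown.

```python
import json

# Assumed location of the status file written during ML-Agents training.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

# "Huggy" is the behavior name used throughout this run.
behavior = status["Huggy"]

# Print one line per recorded checkpoint: step count, mean reward, ONNX path.
for ckpt in behavior["checkpoints"]:
    print(f"{ckpt['steps']:>9,} steps  "
          f"reward={ckpt['reward']:.3f}  {ckpt['file_path']}")

# The final checkpoint is the exported policy at results/Huggy/Huggy.onnx.
final = behavior["final_checkpoint"]
print(f"final checkpoint: {final['file_path']} (reward={final['reward']:.3f})")
```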