ppo-Huggy/run_logs/training_status.json
{
"Huggy": {
"checkpoints": [
{
"steps": 46991,
"file_path": "results/Huggy/Huggy/Huggy-46991.onnx",
"reward": 2.3901576720751248,
"creation_time": 1690251747.180469,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-46991.pt"
]
},
{
"steps": 199967,
"file_path": "results/Huggy/Huggy/Huggy-199967.onnx",
"reward": 3.3250638517466458,
"creation_time": 1690252017.3739548,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199967.pt"
]
},
{
"steps": 399957,
"file_path": "results/Huggy/Huggy/Huggy-399957.onnx",
"reward": 3.1793198453055487,
"creation_time": 1690252345.0391257,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399957.pt"
]
},
{
"steps": 599967,
"file_path": "results/Huggy/Huggy/Huggy-599967.onnx",
"reward": 3.91256948775309,
"creation_time": 1690252661.3101263,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599967.pt"
]
},
{
"steps": 599998,
"file_path": "results/Huggy/Huggy/Huggy-599998.onnx",
"reward": 1.1371034979820251,
"creation_time": 1690252687.0557926,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599998.pt"
]
},
{
"steps": 799940,
"file_path": "results/Huggy/Huggy/Huggy-799940.onnx",
"reward": 4.111079511385929,
"creation_time": 1690253013.0416858,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799940.pt"
]
},
{
"steps": 999983,
"file_path": "results/Huggy/Huggy/Huggy-999983.onnx",
"reward": 4.107720810826085,
"creation_time": 1690253341.669964,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999983.pt"
]
},
{
"steps": 1199966,
"file_path": "results/Huggy/Huggy/Huggy-1199966.onnx",
"reward": 3.943529609590769,
"creation_time": 1690253672.321036,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199966.pt"
]
},
{
"steps": 1399954,
"file_path": "results/Huggy/Huggy/Huggy-1399954.onnx",
"reward": 3.8197274973808235,
"creation_time": 1690254002.5435793,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399954.pt"
]
},
{
"steps": 1599430,
"file_path": "results/Huggy/Huggy/Huggy-1599430.onnx",
"reward": 3.93263783914591,
"creation_time": 1690254337.6294305,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599430.pt"
]
},
{
"steps": 1799921,
"file_path": "results/Huggy/Huggy/Huggy-1799921.onnx",
"reward": 3.5424075707411156,
"creation_time": 1690254667.1082265,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799921.pt"
]
},
{
"steps": 1799970,
"file_path": "results/Huggy/Huggy/Huggy-1799970.onnx",
"reward": 2.4854273796081543,
"creation_time": 1690254877.7227497,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799970.pt"
]
},
{
"steps": 1999974,
"file_path": "results/Huggy/Huggy/Huggy-1999974.onnx",
"reward": 4.0395429472590605,
"creation_time": 1690255206.6390946,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999974.pt"
]
},
{
"steps": 2000018,
"file_path": "results/Huggy/Huggy/Huggy-2000018.onnx",
"reward": 4.037456303283021,
"creation_time": 1690255206.8677385,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000018.pt"
]
}
],
"final_checkpoint": {
"steps": 2000018,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.037456303283021,
"creation_time": 1690255206.8677385,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000018.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}
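
For reference, here is a minimal Python sketch that loads this training_status.json and summarizes the recorded checkpoints (steps, mean reward, creation time), then reports the best checkpoint by reward and the final exported policy. The relative path "run_logs/training_status.json" is an assumption about where the file sits after checking out the repo; adjust it to your local layout.

# Minimal sketch, assuming the file is available locally at run_logs/training_status.json.
import json
from datetime import datetime, timezone

with open("run_logs/training_status.json") as f:
    status = json.load(f)

huggy = status["Huggy"]

# Print one line per checkpoint: step count, mean reward, UTC creation time, ONNX path.
for ckpt in huggy["checkpoints"]:
    created = datetime.fromtimestamp(ckpt["creation_time"], tz=timezone.utc)
    print(f"{ckpt['steps']:>8} steps  reward={ckpt['reward']:.3f}  "
          f"{created:%Y-%m-%d %H:%M:%S} UTC  {ckpt['file_path']}")

# Highest-reward checkpoint among those saved during training.
best = max(huggy["checkpoints"], key=lambda c: c["reward"])
print("Best checkpoint by reward:", best["file_path"], f"({best['reward']:.3f})")

# The final exported policy recorded under "final_checkpoint".
final = huggy["final_checkpoint"]
print("Final exported policy:", final["file_path"], f"({final['reward']:.3f})")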