{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199848,
                "file_path": "results/Huggy/Huggy/Huggy-199848.onnx",
                "reward": 3.390182314677672,
                "creation_time": 1692598210.3006542,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199848.pt"
                ]
            },
            {
                "steps": 399950,
                "file_path": "results/Huggy/Huggy/Huggy-399950.onnx",
                "reward": 3.8007997683116366,
                "creation_time": 1692598458.478427,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399950.pt"
                ]
            },
            {
                "steps": 599966,
                "file_path": "results/Huggy/Huggy/Huggy-599966.onnx",
                "reward": 3.6773895896398106,
                "creation_time": 1692598707.6807575,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599966.pt"
                ]
            },
            {
                "steps": 799981,
                "file_path": "results/Huggy/Huggy/Huggy-799981.onnx",
                "reward": 3.7381703012610137,
                "creation_time": 1692598955.3465993,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799981.pt"
                ]
            },
            {
                "steps": 999945,
                "file_path": "results/Huggy/Huggy/Huggy-999945.onnx",
                "reward": 4.009901209277396,
                "creation_time": 1692599201.9140773,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999945.pt"
                ]
            },
            {
                "steps": 1199959,
                "file_path": "results/Huggy/Huggy/Huggy-1199959.onnx",
                "reward": 3.8754336153759676,
                "creation_time": 1692599462.9281929,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199959.pt"
                ]
            },
            {
                "steps": 1399893,
                "file_path": "results/Huggy/Huggy/Huggy-1399893.onnx",
                "reward": 2.8223944352223325,
                "creation_time": 1692599726.3167078,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399893.pt"
                ]
            },
            {
                "steps": 1599894,
                "file_path": "results/Huggy/Huggy/Huggy-1599894.onnx",
                "reward": 4.057913652901511,
                "creation_time": 1692599980.2411478,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599894.pt"
                ]
            },
            {
                "steps": 1799934,
                "file_path": "results/Huggy/Huggy/Huggy-1799934.onnx",
                "reward": 3.9525179206896173,
                "creation_time": 1692600234.0072224,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799934.pt"
                ]
            },
            {
                "steps": 1999991,
                "file_path": "results/Huggy/Huggy/Huggy-1999991.onnx",
                "reward": 3.5834307927031848,
                "creation_time": 1692600493.299231,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999991.pt"
                ]
            },
            {
                "steps": 2000025,
                "file_path": "results/Huggy/Huggy/Huggy-2000025.onnx",
                "reward": 3.5640715271577066,
                "creation_time": 1692600493.428723,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000025.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000025,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.5640715271577066,
            "creation_time": 1692600493.428723,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000025.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}