{
"Huggy": {
"checkpoints": [
{
"steps": 37,
"file_path": "results/Huggy/Huggy/Huggy-37.onnx",
"reward": 1.325113296508789,
"creation_time": 1713965870.4494457,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-37.pt"
]
},
{
"steps": 199853,
"file_path": "results/Huggy/Huggy/Huggy-199853.onnx",
"reward": 3.0846564648807915,
"creation_time": 1713966137.3782692,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199853.pt"
]
},
{
"steps": 399997,
"file_path": "results/Huggy/Huggy/Huggy-399997.onnx",
"reward": 3.8603951793450575,
"creation_time": 1713966387.5509272,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399997.pt"
]
},
{
"steps": 599882,
"file_path": "results/Huggy/Huggy/Huggy-599882.onnx",
"reward": 3.5712371706962585,
"creation_time": 1713966652.476831,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599882.pt"
]
},
{
"steps": 799887,
"file_path": "results/Huggy/Huggy/Huggy-799887.onnx",
"reward": 3.9568664384159176,
"creation_time": 1713966916.8245127,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799887.pt"
]
},
{
"steps": 999950,
"file_path": "results/Huggy/Huggy/Huggy-999950.onnx",
"reward": 3.712007213932599,
"creation_time": 1713967186.0525124,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999950.pt"
]
},
{
"steps": 1199542,
"file_path": "results/Huggy/Huggy/Huggy-1199542.onnx",
"reward": 3.839348393678665,
"creation_time": 1713967447.235346,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199542.pt"
]
},
{
"steps": 1399725,
"file_path": "results/Huggy/Huggy/Huggy-1399725.onnx",
"reward": 3.966799503494157,
"creation_time": 1713967719.5437322,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399725.pt"
]
},
{
"steps": 1599973,
"file_path": "results/Huggy/Huggy/Huggy-1599973.onnx",
"reward": 4.031748667359352,
"creation_time": 1713967992.3670225,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599973.pt"
]
},
{
"steps": 1799923,
"file_path": "results/Huggy/Huggy/Huggy-1799923.onnx",
"reward": 3.7928134677586733,
"creation_time": 1713968250.821672,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799923.pt"
]
},
{
"steps": 1999943,
"file_path": "results/Huggy/Huggy/Huggy-1999943.onnx",
"reward": 3.972564806861262,
"creation_time": 1713968516.221059,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999943.pt"
]
},
{
"steps": 2000030,
"file_path": "results/Huggy/Huggy/Huggy-2000030.onnx",
"reward": 3.996974276378751,
"creation_time": 1713968516.3540006,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000030.pt"
]
}
],
"final_checkpoint": {
"steps": 2000030,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.996974276378751,
"creation_time": 1713968516.3540006,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000030.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.2.1+cu121"
}
}