ppo-Huggy/run_logs/training_status.json
{
"Huggy": {
"checkpoints": [
{
"steps": 199496,
"file_path": "results/Huggy/Huggy/Huggy-199496.onnx",
"reward": 3.5040661389710475,
"creation_time": 1678284695.816772,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199496.pt"
]
},
{
"steps": 399918,
"file_path": "results/Huggy/Huggy/Huggy-399918.onnx",
"reward": 4.0268689384063086,
"creation_time": 1678284930.1384299,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399918.pt"
]
},
{
"steps": 599959,
"file_path": "results/Huggy/Huggy/Huggy-599959.onnx",
"reward": 3.6885597321294967,
"creation_time": 1678285164.9603863,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599959.pt"
]
},
{
"steps": 799986,
"file_path": "results/Huggy/Huggy/Huggy-799986.onnx",
"reward": 3.678864406894993,
"creation_time": 1678285397.784894,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799986.pt"
]
},
{
"steps": 999901,
"file_path": "results/Huggy/Huggy/Huggy-999901.onnx",
"reward": 3.6419239790543267,
"creation_time": 1678285631.7406259,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999901.pt"
]
},
{
"steps": 1199970,
"file_path": "results/Huggy/Huggy/Huggy-1199970.onnx",
"reward": 3.1273789464450275,
"creation_time": 1678285864.775583,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199970.pt"
]
},
{
"steps": 1399525,
"file_path": "results/Huggy/Huggy/Huggy-1399525.onnx",
"reward": 4.054574424093897,
"creation_time": 1678286096.2219071,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399525.pt"
]
},
{
"steps": 1599985,
"file_path": "results/Huggy/Huggy/Huggy-1599985.onnx",
"reward": 3.7764070916485477,
"creation_time": 1678286330.8483546,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599985.pt"
]
},
{
"steps": 1799963,
"file_path": "results/Huggy/Huggy/Huggy-1799963.onnx",
"reward": 3.727472133725603,
"creation_time": 1678286562.670295,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799963.pt"
]
},
{
"steps": 1999944,
"file_path": "results/Huggy/Huggy/Huggy-1999944.onnx",
"reward": 4.714142757303574,
"creation_time": 1678286796.1679103,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999944.pt"
]
},
{
"steps": 2000059,
"file_path": "results/Huggy/Huggy/Huggy-2000059.onnx",
"reward": 4.717156661881341,
"creation_time": 1678286796.362965,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000059.pt"
]
}
],
"final_checkpoint": {
"steps": 2000059,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.717156661881341,
"creation_time": 1678286796.362965,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000059.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}
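
The JSON above is the checkpoint history that ML-Agents writes during training: each entry records the step count, exported .onnx policy, mean reward at save time, and the auxiliary .pt file. A minimal sketch of reading it back with the Python standard library is shown below; the file location (results/Huggy/run_logs/training_status.json) is an assumption about the local layout, not something stated in the log itself.

    # Minimal sketch: load the training_status.json shown above and summarize its checkpoints.
    # The path below is a hypothetical local location; adjust it to wherever the file lives.
    import json
    from pathlib import Path

    status_path = Path("results/Huggy/run_logs/training_status.json")  # assumed location
    with status_path.open() as f:
        status = json.load(f)

    checkpoints = status["Huggy"]["checkpoints"]
    for ckpt in checkpoints:
        # Print step count, mean reward, and exported ONNX path for each saved checkpoint.
        print(f"steps={ckpt['steps']:>8}  reward={ckpt['reward']:.3f}  file={ckpt['file_path']}")

    # Highest-reward checkpoint in the history, and the final exported model.
    best = max(checkpoints, key=lambda c: c["reward"])
    final = status["Huggy"]["final_checkpoint"]
    print(f"Best checkpoint: {best['file_path']} (reward {best['reward']:.3f})")
    print(f"Final export:    {final['file_path']} (reward {final['reward']:.3f})")

For this run, the final checkpoint at 2,000,059 steps (reward ~4.72) is also the best-scoring one, which is why results/Huggy/Huggy.onnx matches the last entry in the list.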