{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199729,
                "file_path": "results/Huggy/Huggy/Huggy-199729.onnx",
                "reward": 3.354358458864516,
                "creation_time": 1683219973.0454402,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199729.pt"
                ]
            },
            {
                "steps": 399979,
                "file_path": "results/Huggy/Huggy/Huggy-399979.onnx",
                "reward": 3.8040038259824116,
                "creation_time": 1683220218.8501308,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399979.pt"
                ]
            },
            {
                "steps": 599942,
                "file_path": "results/Huggy/Huggy/Huggy-599942.onnx",
                "reward": 4.344780568565641,
                "creation_time": 1683220470.9011574,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599942.pt"
                ]
            },
            {
                "steps": 799932,
                "file_path": "results/Huggy/Huggy/Huggy-799932.onnx",
                "reward": 3.9098078224756945,
                "creation_time": 1683220717.0013735,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799932.pt"
                ]
            },
            {
                "steps": 999935,
                "file_path": "results/Huggy/Huggy/Huggy-999935.onnx",
                "reward": 3.878446882168452,
                "creation_time": 1683220971.5511765,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999935.pt"
                ]
            },
            {
                "steps": 1199935,
                "file_path": "results/Huggy/Huggy/Huggy-1199935.onnx",
                "reward": 3.7726314647556984,
                "creation_time": 1683221224.551401,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199935.pt"
                ]
            },
            {
                "steps": 1399925,
                "file_path": "results/Huggy/Huggy/Huggy-1399925.onnx",
                "reward": 3.711073398590088,
                "creation_time": 1683221483.318369,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399925.pt"
                ]
            },
            {
                "steps": 1599919,
                "file_path": "results/Huggy/Huggy/Huggy-1599919.onnx",
                "reward": 4.042704039745116,
                "creation_time": 1683221734.104557,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599919.pt"
                ]
            },
            {
                "steps": 1799611,
                "file_path": "results/Huggy/Huggy/Huggy-1799611.onnx",
                "reward": 3.9247448981263253,
                "creation_time": 1683221983.9526522,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799611.pt"
                ]
            },
            {
                "steps": 1999984,
                "file_path": "results/Huggy/Huggy/Huggy-1999984.onnx",
                "reward": 3.662198680819887,
                "creation_time": 1683222230.6004755,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999984.pt"
                ]
            },
            {
                "steps": 2000052,
                "file_path": "results/Huggy/Huggy/Huggy-2000052.onnx",
                "reward": 3.6747988373485962,
                "creation_time": 1683222230.7248807,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000052.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000052,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.6747988373485962,
            "creation_time": 1683222230.7248807,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000052.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}