ppo-Huggy / run_logs / training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199927,
                "file_path": "results/Huggy/Huggy/Huggy-199927.onnx",
                "reward": 3.174501000028668,
                "creation_time": 1671543244.215365,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199927.pt"
                ]
            },
            {
                "steps": 399986,
                "file_path": "results/Huggy/Huggy/Huggy-399986.onnx",
                "reward": 3.761377736926079,
                "creation_time": 1671543460.1918683,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399986.pt"
                ]
            },
            {
                "steps": 599963,
                "file_path": "results/Huggy/Huggy/Huggy-599963.onnx",
                "reward": 3.679702648749718,
                "creation_time": 1671543679.320609,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599963.pt"
                ]
            },
            {
                "steps": 799971,
                "file_path": "results/Huggy/Huggy/Huggy-799971.onnx",
                "reward": 3.7899345797919186,
                "creation_time": 1671543897.1233091,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799971.pt"
                ]
            },
            {
                "steps": 999950,
                "file_path": "results/Huggy/Huggy/Huggy-999950.onnx",
                "reward": 3.928588105850861,
                "creation_time": 1671544117.6760411,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999950.pt"
                ]
            },
            {
                "steps": 1199974,
                "file_path": "results/Huggy/Huggy/Huggy-1199974.onnx",
                "reward": 3.994959833130004,
                "creation_time": 1671544339.0479295,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199974.pt"
                ]
            },
            {
                "steps": 1399988,
                "file_path": "results/Huggy/Huggy/Huggy-1399988.onnx",
                "reward": null,
                "creation_time": 1671544561.1984565,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399988.pt"
                ]
            },
            {
                "steps": 1599964,
                "file_path": "results/Huggy/Huggy/Huggy-1599964.onnx",
                "reward": 3.8183395330261134,
                "creation_time": 1671544781.9910934,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599964.pt"
                ]
            },
            {
                "steps": 1799434,
                "file_path": "results/Huggy/Huggy/Huggy-1799434.onnx",
                "reward": 3.8320062577083545,
                "creation_time": 1671545004.9298174,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799434.pt"
                ]
            },
            {
                "steps": 1999952,
                "file_path": "results/Huggy/Huggy/Huggy-1999952.onnx",
                "reward": 3.2020247247483997,
                "creation_time": 1671545226.479982,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999952.pt"
                ]
            },
            {
                "steps": 2000046,
                "file_path": "results/Huggy/Huggy/Huggy-2000046.onnx",
                "reward": 3.2570797204971313,
                "creation_time": 1671545226.5987453,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000046.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000046,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.2570797204971313,
            "creation_time": 1671545226.5987453,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000046.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.29.0.dev0",
        "torch_version": "1.8.1+cu102"
    }
}
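
For reference, a minimal sketch of how this log could be inspected, assuming the file sits at run_logs/training_status.json relative to the working directory and the behavior name is "Huggy" (both hold for this run; adjust for other setups). It prints the reward recorded at each checkpoint and the final exported policy path:

import json

# Load the ML-Agents training status log (path is an assumption; adjust as needed).
with open("run_logs/training_status.json") as f:
    status = json.load(f)

behavior = status["Huggy"]

# "reward" can be null in this log (see the 1399988-step entry), presumably
# when no episode statistics were available at checkpoint time, so guard for None.
for ckpt in behavior["checkpoints"]:
    reward = ckpt["reward"]
    shown = f"{reward:.3f}" if reward is not None else "n/a"
    print(f'{ckpt["steps"]:>8} steps  reward={shown}  -> {ckpt["file_path"]}')

final = behavior["final_checkpoint"]
print(f'final: {final["steps"]} steps, reward={final["reward"]:.3f}')
print("exported policy:", final["file_path"])

Note that "auxillary_file_paths" is spelled exactly as ML-Agents writes it, so any parser should use that key verbatim.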