ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199968,
                "file_path": "results/Huggy/Huggy/Huggy-199968.onnx",
                "reward": 3.4323127416464,
                "creation_time": 1675531363.515647,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199968.pt"
                ]
            },
            {
                "steps": 399944,
                "file_path": "results/Huggy/Huggy/Huggy-399944.onnx",
                "reward": 3.889873323837916,
                "creation_time": 1675531585.911283,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399944.pt"
                ]
            },
            {
                "steps": 599927,
                "file_path": "results/Huggy/Huggy/Huggy-599927.onnx",
                "reward": 3.923423571586609,
                "creation_time": 1675531811.649946,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599927.pt"
                ]
            },
            {
                "steps": 799895,
                "file_path": "results/Huggy/Huggy/Huggy-799895.onnx",
                "reward": 3.760259374038323,
                "creation_time": 1675532039.87804,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799895.pt"
                ]
            },
            {
                "steps": 999647,
                "file_path": "results/Huggy/Huggy/Huggy-999647.onnx",
                "reward": 3.501073805765174,
                "creation_time": 1675532268.7838469,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999647.pt"
                ]
            },
            {
                "steps": 1199985,
                "file_path": "results/Huggy/Huggy/Huggy-1199985.onnx",
                "reward": 3.305245569774083,
                "creation_time": 1675532493.6300766,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199985.pt"
                ]
            },
            {
                "steps": 1399988,
                "file_path": "results/Huggy/Huggy/Huggy-1399988.onnx",
                "reward": 3.6533684285911354,
                "creation_time": 1675532709.7509196,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399988.pt"
                ]
            },
            {
                "steps": 1599563,
                "file_path": "results/Huggy/Huggy/Huggy-1599563.onnx",
                "reward": 3.8440820093740498,
                "creation_time": 1675532933.0437677,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599563.pt"
                ]
            },
            {
                "steps": 1799943,
                "file_path": "results/Huggy/Huggy/Huggy-1799943.onnx",
                "reward": 3.7800222039222717,
                "creation_time": 1675533155.33331,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799943.pt"
                ]
            },
            {
                "steps": 1999995,
                "file_path": "results/Huggy/Huggy/Huggy-1999995.onnx",
                "reward": 3.510092602962512,
                "creation_time": 1675533374.0988345,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999995.pt"
                ]
            },
            {
                "steps": 2000055,
                "file_path": "results/Huggy/Huggy/Huggy-2000055.onnx",
                "reward": 3.5088088898431686,
                "creation_time": 1675533374.2102933,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000055.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000055,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.5088088898431686,
            "creation_time": 1675533374.2102933,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000055.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.29.0.dev0",
        "torch_version": "1.8.1+cu102"
    }
}
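
For reference, a minimal sketch of reading this checkpoint log back with Python's standard json module and listing each checkpoint's step count, mean reward, and exported ONNX path. The run_logs/training_status.json location is an assumption based on the repository layout shown above; adjust the path to wherever the file actually lives.

    # Sketch: summarize the ML-Agents checkpoint history stored in training_status.json.
    # Assumes the file is at run_logs/training_status.json relative to the working directory.
    import json

    with open("run_logs/training_status.json") as f:
        status = json.load(f)

    # Each entry records the training step, mean reward, and exported .onnx policy.
    for ckpt in status["Huggy"]["checkpoints"]:
        print(f'{ckpt["steps"]:>9} steps  reward {ckpt["reward"]:.3f}  -> {ckpt["file_path"]}')

    # The final checkpoint is the policy exported as results/Huggy/Huggy.onnx.
    final = status["Huggy"]["final_checkpoint"]
    print(f'final: {final["steps"]} steps  reward {final["reward"]:.3f}  -> {final["file_path"]}')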