ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199792,
                "file_path": "results/Huggy/Huggy/Huggy-199792.onnx",
                "reward": 3.377128677184765,
                "creation_time": 1671775697.221605,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199792.pt"
                ]
            },
            {
                "steps": 399920,
                "file_path": "results/Huggy/Huggy/Huggy-399920.onnx",
                "reward": 3.6633610078266687,
                "creation_time": 1671775915.5547707,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399920.pt"
                ]
            },
            {
                "steps": 599961,
                "file_path": "results/Huggy/Huggy/Huggy-599961.onnx",
                "reward": 3.8024248860099097,
                "creation_time": 1671776135.586885,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599961.pt"
                ]
            },
            {
                "steps": 799856,
                "file_path": "results/Huggy/Huggy/Huggy-799856.onnx",
                "reward": 3.7629033434176873,
                "creation_time": 1671776356.1411963,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799856.pt"
                ]
            },
            {
                "steps": 999464,
                "file_path": "results/Huggy/Huggy/Huggy-999464.onnx",
                "reward": 3.814821870752083,
                "creation_time": 1671776578.6334996,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999464.pt"
                ]
            },
            {
                "steps": 1199299,
                "file_path": "results/Huggy/Huggy/Huggy-1199299.onnx",
                "reward": 3.8033862225711346,
                "creation_time": 1671776800.5883853,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199299.pt"
                ]
            },
            {
                "steps": 1399949,
                "file_path": "results/Huggy/Huggy/Huggy-1399949.onnx",
                "reward": 3.720768229100714,
                "creation_time": 1671777021.3902183,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399949.pt"
                ]
            },
            {
                "steps": 1599954,
                "file_path": "results/Huggy/Huggy/Huggy-1599954.onnx",
                "reward": 3.456810037782587,
                "creation_time": 1671777243.1650953,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599954.pt"
                ]
            },
            {
                "steps": 1799995,
                "file_path": "results/Huggy/Huggy/Huggy-1799995.onnx",
                "reward": 3.7005713755308194,
                "creation_time": 1671777467.3606575,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799995.pt"
                ]
            },
            {
                "steps": 1999981,
                "file_path": "results/Huggy/Huggy/Huggy-1999981.onnx",
                "reward": 2.978668825966971,
                "creation_time": 1671777691.5470266,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999981.pt"
                ]
            },
            {
                "steps": 2000082,
                "file_path": "results/Huggy/Huggy/Huggy-2000082.onnx",
                "reward": 3.2909108996391296,
                "creation_time": 1671777691.6733813,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000082.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000082,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.2909108996391296,
            "creation_time": 1671777691.6733813,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000082.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.29.0.dev0",
        "torch_version": "1.8.1+cu102"
    }
}
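
A minimal sketch of how one might inspect this file: it loads the training status JSON, skips the "metadata" entry, and reports the highest-reward checkpoint per behavior. The path run_logs/training_status.json is an assumption about where the file sits relative to the working directory; adjust it for your run. Note the key "auxillary_file_paths" is spelled exactly as ML-Agents writes it.

# inspect_training_status.py -- sketch, not part of the original log
import json
from pathlib import Path

# Assumed location of the file shown above; change as needed.
status_path = Path("run_logs/training_status.json")
with status_path.open() as f:
    status = json.load(f)

# Behavior entries (here, "Huggy") sit at the top level next to "metadata".
for behavior, data in status.items():
    if behavior == "metadata":
        continue
    checkpoints = data["checkpoints"]
    # Pick the checkpoint with the highest recorded mean reward.
    best = max(checkpoints, key=lambda c: c["reward"])
    final = data["final_checkpoint"]
    print(f"{behavior}: {len(checkpoints)} checkpoints")
    print(f"  best reward {best['reward']:.3f} at step {best['steps']}")
    print(f"  best ONNX: {best['file_path']}")
    print(f"  final: step {final['steps']}, reward {final['reward']:.3f}")

Run against the log above, this would flag the step-999464 checkpoint (reward ~3.815) as the best by mean reward, while the exported final model at results/Huggy/Huggy.onnx corresponds to step 2000082 (reward ~3.291).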