ppo-Huggy / run_logs / training_status.json
{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199965,
        "file_path": "results/Huggy/Huggy/Huggy-199965.onnx",
        "reward": 3.6493559420108794,
        "creation_time": 1687336213.494463,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-199965.pt"
        ]
      },
      {
        "steps": 399347,
        "file_path": "results/Huggy/Huggy/Huggy-399347.onnx",
        "reward": 3.7473605851658056,
        "creation_time": 1687336459.4746768,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-399347.pt"
        ]
      },
      {
        "steps": 599950,
        "file_path": "results/Huggy/Huggy/Huggy-599950.onnx",
        "reward": 4.028978343520846,
        "creation_time": 1687336710.740404,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-599950.pt"
        ]
      },
      {
        "steps": 799963,
        "file_path": "results/Huggy/Huggy/Huggy-799963.onnx",
        "reward": 3.861327908063298,
        "creation_time": 1687336957.9435031,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-799963.pt"
        ]
      },
      {
        "steps": 999927,
        "file_path": "results/Huggy/Huggy/Huggy-999927.onnx",
        "reward": 3.8809839448621197,
        "creation_time": 1687337207.8105447,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-999927.pt"
        ]
      },
      {
        "steps": 1199942,
        "file_path": "results/Huggy/Huggy/Huggy-1199942.onnx",
        "reward": 3.663247551988153,
        "creation_time": 1687337456.641186,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1199942.pt"
        ]
      },
      {
        "steps": 1399391,
        "file_path": "results/Huggy/Huggy/Huggy-1399391.onnx",
        "reward": 3.7683593489402947,
        "creation_time": 1687337703.5229478,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1399391.pt"
        ]
      },
      {
        "steps": 1599916,
        "file_path": "results/Huggy/Huggy/Huggy-1599916.onnx",
        "reward": 3.786644798360373,
        "creation_time": 1687337952.9697235,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1599916.pt"
        ]
      },
      {
        "steps": 1799467,
        "file_path": "results/Huggy/Huggy/Huggy-1799467.onnx",
        "reward": 3.8996544687284365,
        "creation_time": 1687338203.016992,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1799467.pt"
        ]
      },
      {
        "steps": 1999920,
        "file_path": "results/Huggy/Huggy/Huggy-1999920.onnx",
        "reward": 3.8941110968589783,
        "creation_time": 1687338455.0804596,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1999920.pt"
        ]
      },
      {
        "steps": 2000055,
        "file_path": "results/Huggy/Huggy/Huggy-2000055.onnx",
        "reward": 3.94799332242263,
        "creation_time": 1687338455.213203,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-2000055.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000055,
      "file_path": "results/Huggy/Huggy.onnx",
      "reward": 3.94799332242263,
      "creation_time": 1687338455.213203,
      "auxillary_file_paths": [
        "results/Huggy/Huggy/Huggy-2000055.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "0.31.0.dev0",
    "torch_version": "1.11.0+cu102"
  }
}
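
This file is the checkpoint log ML-Agents writes during training: each entry under `checkpoints` records the step count, the exported `.onnx` policy path, the mean reward at save time, a Unix creation timestamp, and the companion `.pt` file (note the key is spelled `auxillary_file_paths` in the data itself). `final_checkpoint` mirrors the last entry but points at the top-level `results/Huggy/Huggy.onnx` export. Below is a minimal sketch of inspecting the log with the standard library, assuming the file sits at `run_logs/training_status.json` relative to the working directory; the path and the `"Huggy"` behavior name come from the file above.

```python
import json

# Load the ML-Agents training status log (path is an assumption based on
# the repo layout shown above).
with open("run_logs/training_status.json") as f:
    status = json.load(f)

huggy = status["Huggy"]

# Print step count, mean reward, and exported policy path per checkpoint.
for ckpt in huggy["checkpoints"]:
    print(f'{ckpt["steps"]:>8}  reward={ckpt["reward"]:.3f}  {ckpt["file_path"]}')

# The final checkpoint points at the top-level .onnx export.
final = huggy["final_checkpoint"]
print(f'final: {final["steps"]} steps, reward={final["reward"]:.3f}, {final["file_path"]}')
```

Run against the data above, this would show the reward climbing from about 3.65 at ~200k steps to 3.95 at the final 2,000,055-step checkpoint.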