{
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
},
"Huggy": {
"checkpoints": [
{
"steps": 0,
"file_path": "results/Huggy/Huggy/Huggy-0.onnx",
"reward": null,
"creation_time": 1690330292.5987942,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-0.pt"
]
},
{
"steps": 199930,
"file_path": "results/Huggy/Huggy/Huggy-199930.onnx",
"reward": 3.3902546872695285,
"creation_time": 1690330495.7250135,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199930.pt"
]
},
{
"steps": 399902,
"file_path": "results/Huggy/Huggy/Huggy-399902.onnx",
"reward": 3.8888832995148954,
"creation_time": 1690330689.5510848,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399902.pt"
]
},
{
"steps": 599399,
"file_path": "results/Huggy/Huggy/Huggy-599399.onnx",
"reward": 2.9535060564676923,
"creation_time": 1690330885.0966837,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599399.pt"
]
},
{
"steps": 799772,
"file_path": "results/Huggy/Huggy/Huggy-799772.onnx",
"reward": 3.796442897273944,
"creation_time": 1690331077.9653506,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799772.pt"
]
},
{
"steps": 999844,
"file_path": "results/Huggy/Huggy/Huggy-999844.onnx",
"reward": 3.7982390924640326,
"creation_time": 1690331274.1986604,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999844.pt"
]
},
{
"steps": 1199918,
"file_path": "results/Huggy/Huggy/Huggy-1199918.onnx",
"reward": 3.3939971801909534,
"creation_time": 1690331470.7879143,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199918.pt"
]
},
{
"steps": 1399983,
"file_path": "results/Huggy/Huggy/Huggy-1399983.onnx",
"reward": 3.454694376140833,
"creation_time": 1690331662.834823,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399983.pt"
]
},
{
"steps": 1599680,
"file_path": "results/Huggy/Huggy/Huggy-1599680.onnx",
"reward": 3.968809755103102,
"creation_time": 1690331858.648934,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599680.pt"
]
},
{
"steps": 1799956,
"file_path": "results/Huggy/Huggy/Huggy-1799956.onnx",
"reward": 4.019580002416644,
"creation_time": 1690332054.1592753,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799956.pt"
]
},
{
"steps": 1999933,
"file_path": "results/Huggy/Huggy/Huggy-1999933.onnx",
"reward": 3.6663357784231025,
"creation_time": 1690332244.8488967,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999933.pt"
]
},
{
"steps": 2000036,
"file_path": "results/Huggy/Huggy/Huggy-2000036.onnx",
"reward": 3.6773437629202883,
"creation_time": 1690332244.9643779,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000036.pt"
]
}
],
"final_checkpoint": {
"steps": 2000036,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.6773437629202883,
"creation_time": 1690332244.9643779,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000036.pt"
]
}
}
}