{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199775,
        "file_path": "results/Huggy/Huggy/Huggy-199775.onnx",
        "reward": 3.2686916868408007,
        "creation_time": 1697086149.7742116,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-199775.pt"
        ]
      },
      {
        "steps": 399885,
        "file_path": "results/Huggy/Huggy/Huggy-399885.onnx",
        "reward": 3.360030142397716,
        "creation_time": 1697086388.8397973,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-399885.pt"
        ]
      },
      {
        "steps": 599973,
        "file_path": "results/Huggy/Huggy/Huggy-599973.onnx",
        "reward": 4.651391213590449,
        "creation_time": 1697086624.76516,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-599973.pt"
        ]
      },
      {
        "steps": 799937,
        "file_path": "results/Huggy/Huggy/Huggy-799937.onnx",
        "reward": 4.084472353641804,
        "creation_time": 1697086856.7276897,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-799937.pt"
        ]
      },
      {
        "steps": 999973,
        "file_path": "results/Huggy/Huggy/Huggy-999973.onnx",
        "reward": 3.7362084540187337,
        "creation_time": 1697087095.1183255,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-999973.pt"
        ]
      },
      {
        "steps": 1199909,
        "file_path": "results/Huggy/Huggy/Huggy-1199909.onnx",
        "reward": 4.603170867149647,
        "creation_time": 1697087337.361509,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1199909.pt"
        ]
      },
      {
        "steps": 1399965,
        "file_path": "results/Huggy/Huggy/Huggy-1399965.onnx",
        "reward": 3.9130412376924757,
        "creation_time": 1697087575.0767522,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1399965.pt"
        ]
      },
      {
        "steps": 1599925,
        "file_path": "results/Huggy/Huggy/Huggy-1599925.onnx",
        "reward": 3.760768556431548,
        "creation_time": 1697087816.6765826,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1599925.pt"
        ]
      },
      {
        "steps": 1799988,
        "file_path": "results/Huggy/Huggy/Huggy-1799988.onnx",
        "reward": 3.55496042653134,
        "creation_time": 1697088067.483964,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1799988.pt"
        ]
      },
      {
        "steps": 1999511,
        "file_path": "results/Huggy/Huggy/Huggy-1999511.onnx",
        "reward": 4.448199462890625,
        "creation_time": 1697088313.9746797,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1999511.pt"
        ]
      },
      {
        "steps": 2000261,
        "file_path": "results/Huggy/Huggy/Huggy-2000261.onnx",
        "reward": 3.1866596142450967,
        "creation_time": 1697088314.1116834,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-2000261.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000261,
      "file_path": "results/Huggy/Huggy.onnx",
      "reward": 3.1866596142450967,
      "creation_time": 1697088314.1116834,
      "auxillary_file_paths": [
        "results/Huggy/Huggy/Huggy-2000261.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "1.1.0.dev0",
    "torch_version": "2.0.1+cu118"
  }
}