ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199800,
                "file_path": "results/Huggy/Huggy/Huggy-199800.onnx",
                "reward": 3.218682105652988,
                "creation_time": 1702721012.2087917,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199800.pt"
                ]
            },
            {
                "steps": 399946,
                "file_path": "results/Huggy/Huggy/Huggy-399946.onnx",
                "reward": 3.5277032535523176,
                "creation_time": 1702721256.345544,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399946.pt"
                ]
            },
            {
                "steps": 599922,
                "file_path": "results/Huggy/Huggy/Huggy-599922.onnx",
                "reward": 4.232528946616433,
                "creation_time": 1702721503.5813472,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599922.pt"
                ]
            },
            {
                "steps": 799982,
                "file_path": "results/Huggy/Huggy/Huggy-799982.onnx",
                "reward": 3.751619515124332,
                "creation_time": 1702721748.4814694,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799982.pt"
                ]
            },
            {
                "steps": 999987,
                "file_path": "results/Huggy/Huggy/Huggy-999987.onnx",
                "reward": 3.7760170003560583,
                "creation_time": 1702721999.1354427,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999987.pt"
                ]
            },
            {
                "steps": 1199978,
                "file_path": "results/Huggy/Huggy/Huggy-1199978.onnx",
                "reward": 4.022116863905494,
                "creation_time": 1702722247.2779682,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199978.pt"
                ]
            },
            {
                "steps": 1399993,
                "file_path": "results/Huggy/Huggy/Huggy-1399993.onnx",
                "reward": 3.7479613185842373,
                "creation_time": 1702722493.5225515,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399993.pt"
                ]
            },
            {
                "steps": 1599954,
                "file_path": "results/Huggy/Huggy/Huggy-1599954.onnx",
                "reward": 4.115926764420504,
                "creation_time": 1702722739.7232206,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599954.pt"
                ]
            },
            {
                "steps": 1799982,
                "file_path": "results/Huggy/Huggy/Huggy-1799982.onnx",
                "reward": 3.6867902028460464,
                "creation_time": 1702722989.5637295,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799982.pt"
                ]
            },
            {
                "steps": 1999998,
                "file_path": "results/Huggy/Huggy/Huggy-1999998.onnx",
                "reward": 3.7728655292437625,
                "creation_time": 1702723238.4689517,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999998.pt"
                ]
            },
            {
                "steps": 2000058,
                "file_path": "results/Huggy/Huggy/Huggy-2000058.onnx",
                "reward": 3.7437597085844794,
                "creation_time": 1702723238.649916,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000058.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000058,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.7437597085844794,
            "creation_time": 1702723238.649916,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000058.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "1.1.0.dev0",
        "torch_version": "2.1.2+cu121"
    }
}
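
For reference, a minimal sketch (not part of this repository) of how the checkpoint records above could be inspected with Python's standard library. The relative path is an assumption about where the file sits locally; adjust it to your checkout.

import json

# Path is assumed; point it at this run_logs/training_status.json file.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

checkpoints = status["Huggy"]["checkpoints"]

# Find the checkpoint with the highest recorded mean reward.
best = max(checkpoints, key=lambda c: c["reward"])

print(f"{len(checkpoints)} checkpoints recorded")
print(f"best mean reward {best['reward']:.3f} at step {best['steps']}")
print("final exported model:", status["Huggy"]["final_checkpoint"]["file_path"])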