{
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.2+cu121"
},
"Huggy": {
"checkpoints": [
{
"steps": 0,
"file_path": "results/Huggy/Huggy/Huggy-0.onnx",
"reward": null,
"creation_time": 1706110340.7719877,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-0.pt"
]
},
{
"steps": 199967,
"file_path": "results/Huggy/Huggy/Huggy-199967.onnx",
"reward": 3.37694988027215,
"creation_time": 1706110712.4123497,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199967.pt"
]
},
{
"steps": 399912,
"file_path": "results/Huggy/Huggy/Huggy-399912.onnx",
"reward": 3.835635538062742,
"creation_time": 1706110946.8855193,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399912.pt"
]
},
{
"steps": 599955,
"file_path": "results/Huggy/Huggy/Huggy-599955.onnx",
"reward": 4.121821405986945,
"creation_time": 1706111179.311281,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599955.pt"
]
},
{
"steps": 799934,
"file_path": "results/Huggy/Huggy/Huggy-799934.onnx",
"reward": 4.021927364756552,
"creation_time": 1706111411.9173825,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799934.pt"
]
},
{
"steps": 999950,
"file_path": "results/Huggy/Huggy/Huggy-999950.onnx",
"reward": 3.874256270843864,
"creation_time": 1706111655.9843495,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999950.pt"
]
},
{
"steps": 1199980,
"file_path": "results/Huggy/Huggy/Huggy-1199980.onnx",
"reward": 4.026018446997592,
"creation_time": 1706111901.7762918,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199980.pt"
]
},
{
"steps": 1399990,
"file_path": "results/Huggy/Huggy/Huggy-1399990.onnx",
"reward": 3.2254076855523244,
"creation_time": 1706112139.3126693,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399990.pt"
]
},
{
"steps": 1599992,
"file_path": "results/Huggy/Huggy/Huggy-1599992.onnx",
"reward": 4.001443887859714,
"creation_time": 1706112373.9212427,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599992.pt"
]
},
{
"steps": 1799972,
"file_path": "results/Huggy/Huggy/Huggy-1799972.onnx",
"reward": 3.999176986251317,
"creation_time": 1706112610.2225149,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799972.pt"
]
},
{
"steps": 1999925,
"file_path": "results/Huggy/Huggy/Huggy-1999925.onnx",
"reward": 3.7665049007960727,
"creation_time": 1706112845.768263,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999925.pt"
]
},
{
"steps": 2000030,
"file_path": "results/Huggy/Huggy/Huggy-2000030.onnx",
"reward": 3.774885378777981,
"creation_time": 1706112845.9398303,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000030.pt"
]
}
],
"final_checkpoint": {
"steps": 2000030,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.774885378777981,
"creation_time": 1706112845.9398303,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000030.pt"
]
}
}
}