{
"Huggy": {
"checkpoints": [
{
"steps": 199693,
"file_path": "results/Huggy2/Huggy/Huggy-199693.onnx",
"reward": 3.1298132574380335,
"creation_time": 1713181660.226027,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199693.pt"
]
},
{
"steps": 399982,
"file_path": "results/Huggy2/Huggy/Huggy-399982.onnx",
"reward": 3.6323047570280127,
"creation_time": 1713182003.6520715,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399982.pt"
]
},
{
"steps": 599983,
"file_path": "results/Huggy2/Huggy/Huggy-599983.onnx",
"reward": 4.2976923286914825,
"creation_time": 1713182344.969488,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599983.pt"
]
},
{
"steps": 799992,
"file_path": "results/Huggy2/Huggy/Huggy-799992.onnx",
"reward": 3.871437426617271,
"creation_time": 1713182705.816768,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799992.pt"
]
},
{
"steps": 999999,
"file_path": "results/Huggy2/Huggy/Huggy-999999.onnx",
"reward": 3.8126951805182867,
"creation_time": 1713183052.5449996,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999999.pt"
]
},
{
"steps": 1199932,
"file_path": "results/Huggy2/Huggy/Huggy-1199932.onnx",
"reward": 3.764046692422458,
"creation_time": 1713183407.7478235,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199932.pt"
]
},
{
"steps": 1399902,
"file_path": "results/Huggy2/Huggy/Huggy-1399902.onnx",
"reward": 3.5721516949789867,
"creation_time": 1713183763.1426167,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399902.pt"
]
},
{
"steps": 1599548,
"file_path": "results/Huggy2/Huggy/Huggy-1599548.onnx",
"reward": 3.9951934981990505,
"creation_time": 1713184120.894656,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599548.pt"
]
},
{
"steps": 1799969,
"file_path": "results/Huggy2/Huggy/Huggy-1799969.onnx",
"reward": 3.7468933895014334,
"creation_time": 1713184477.1914043,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799969.pt"
]
},
{
"steps": 1999946,
"file_path": "results/Huggy2/Huggy/Huggy-1999946.onnx",
"reward": 4.001069817626686,
"creation_time": 1713184834.6260347,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999946.pt"
]
},
{
"steps": 2000043,
"file_path": "results/Huggy2/Huggy/Huggy-2000043.onnx",
"reward": 4.004622126447743,
"creation_time": 1713184834.8703551,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000043.pt"
]
}
],
"final_checkpoint": {
"steps": 2000043,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 4.004622126447743,
"creation_time": 1713184834.8703551,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000043.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.2.1+cu121"
}
}