{
"Huggy": {
"checkpoints": [
{
"steps": 199998,
"file_path": "results/Huggy2/Huggy/Huggy-199998.onnx",
"reward": 3.23256043891112,
"creation_time": 1718017648.8726635,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199998.pt"
]
},
{
"steps": 399835,
"file_path": "results/Huggy2/Huggy/Huggy-399835.onnx",
"reward": 3.9487328318988575,
"creation_time": 1718017906.515775,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399835.pt"
]
},
{
"steps": 599871,
"file_path": "results/Huggy2/Huggy/Huggy-599871.onnx",
"reward": 4.1391080088085594,
"creation_time": 1718018174.4673405,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599871.pt"
]
},
{
"steps": 799956,
"file_path": "results/Huggy2/Huggy/Huggy-799956.onnx",
"reward": 3.533953942634441,
"creation_time": 1718018444.6571507,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799956.pt"
]
},
{
"steps": 999969,
"file_path": "results/Huggy2/Huggy/Huggy-999969.onnx",
"reward": 4.126392269134522,
"creation_time": 1718018717.7436256,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999969.pt"
]
},
{
"steps": 1199992,
"file_path": "results/Huggy2/Huggy/Huggy-1199992.onnx",
"reward": 3.8521746377475927,
"creation_time": 1718018976.4154701,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199992.pt"
]
},
{
"steps": 1399985,
"file_path": "results/Huggy2/Huggy/Huggy-1399985.onnx",
"reward": 3.7139875168270535,
"creation_time": 1718019236.5179675,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399985.pt"
]
},
{
"steps": 1599643,
"file_path": "results/Huggy2/Huggy/Huggy-1599643.onnx",
"reward": 3.7617212031036615,
"creation_time": 1718019502.6345613,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599643.pt"
]
},
{
"steps": 1799922,
"file_path": "results/Huggy2/Huggy/Huggy-1799922.onnx",
"reward": 3.6198219349509793,
"creation_time": 1718019763.1058247,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799922.pt"
]
},
{
"steps": 1999989,
"file_path": "results/Huggy2/Huggy/Huggy-1999989.onnx",
"reward": 3.19519499540329,
"creation_time": 1718020017.375242,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999989.pt"
]
},
{
"steps": 2000107,
"file_path": "results/Huggy2/Huggy/Huggy-2000107.onnx",
"reward": 3.306581054415022,
"creation_time": 1718020017.4954214,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000107.pt"
]
}
],
"final_checkpoint": {
"steps": 2000107,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.306581054415022,
"creation_time": 1718020017.4954214,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000107.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.0+cu121"
}
}