{
"Huggy": {
"checkpoints": [
{
"steps": 199831,
"file_path": "results/Huggy/Huggy/Huggy-199831.onnx",
"reward": 3.3625189582506816,
"creation_time": 1705550141.2761269,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199831.pt"
]
},
{
"steps": 399942,
"file_path": "results/Huggy/Huggy/Huggy-399942.onnx",
"reward": 3.578501748316216,
"creation_time": 1705550410.7067719,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399942.pt"
]
},
{
"steps": 599933,
"file_path": "results/Huggy/Huggy/Huggy-599933.onnx",
"reward": 3.154873749305462,
"creation_time": 1705550686.00234,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599933.pt"
]
},
{
"steps": 799937,
"file_path": "results/Huggy/Huggy/Huggy-799937.onnx",
"reward": 3.836544080712329,
"creation_time": 1705550956.6508164,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799937.pt"
]
},
{
"steps": 999990,
"file_path": "results/Huggy/Huggy/Huggy-999990.onnx",
"reward": 3.601367387345167,
"creation_time": 1705551230.8573575,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999990.pt"
]
},
{
"steps": 1199875,
"file_path": "results/Huggy/Huggy/Huggy-1199875.onnx",
"reward": 3.912618480126063,
"creation_time": 1705551505.878475,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199875.pt"
]
},
{
"steps": 1399996,
"file_path": "results/Huggy/Huggy/Huggy-1399996.onnx",
"reward": 3.6816984626459957,
"creation_time": 1705551775.2004561,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399996.pt"
]
},
{
"steps": 1599935,
"file_path": "results/Huggy/Huggy/Huggy-1599935.onnx",
"reward": 4.058008329591889,
"creation_time": 1705552049.8270583,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599935.pt"
]
},
{
"steps": 1799965,
"file_path": "results/Huggy/Huggy/Huggy-1799965.onnx",
"reward": 3.59506976290753,
"creation_time": 1705552323.03203,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799965.pt"
]
},
{
"steps": 1999973,
"file_path": "results/Huggy/Huggy/Huggy-1999973.onnx",
"reward": 3.7092982347194967,
"creation_time": 1705552590.869056,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999973.pt"
]
},
{
"steps": 2000078,
"file_path": "results/Huggy/Huggy/Huggy-2000078.onnx",
"reward": 3.7170952404515023,
"creation_time": 1705552591.0555177,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000078.pt"
]
}
],
"final_checkpoint": {
"steps": 2000078,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.7170952404515023,
"creation_time": 1705552591.0555177,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000078.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.2+cu121"
}
}