{
"Huggy": {
"checkpoints": [
{
"steps": 199898,
"file_path": "results/Huggy2/Huggy/Huggy-199898.onnx",
"reward": 3.5422962121109465,
"creation_time": 1713444394.8665607,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199898.pt"
]
},
{
"steps": 399935,
"file_path": "results/Huggy2/Huggy/Huggy-399935.onnx",
"reward": 3.8301876573001636,
"creation_time": 1713444631.989519,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399935.pt"
]
},
{
"steps": 599969,
"file_path": "results/Huggy2/Huggy/Huggy-599969.onnx",
"reward": 3.614039969059729,
"creation_time": 1713444873.3495243,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599969.pt"
]
},
{
"steps": 799990,
"file_path": "results/Huggy2/Huggy/Huggy-799990.onnx",
"reward": 3.543715935945511,
"creation_time": 1713445109.9323406,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799990.pt"
]
},
{
"steps": 999994,
"file_path": "results/Huggy2/Huggy/Huggy-999994.onnx",
"reward": 3.894002625465393,
"creation_time": 1713445352.7281563,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999994.pt"
]
},
{
"steps": 1199966,
"file_path": "results/Huggy2/Huggy/Huggy-1199966.onnx",
"reward": 3.827685143849621,
"creation_time": 1713445593.5503693,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199966.pt"
]
},
{
"steps": 1399901,
"file_path": "results/Huggy2/Huggy/Huggy-1399901.onnx",
"reward": 4.857671546936035,
"creation_time": 1713445826.4498928,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399901.pt"
]
},
{
"steps": 1599939,
"file_path": "results/Huggy2/Huggy/Huggy-1599939.onnx",
"reward": 3.842915748135518,
"creation_time": 1713446056.481183,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599939.pt"
]
},
{
"steps": 1799960,
"file_path": "results/Huggy2/Huggy/Huggy-1799960.onnx",
"reward": 3.5134800790498653,
"creation_time": 1713446288.0904858,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799960.pt"
]
},
{
"steps": 1999967,
"file_path": "results/Huggy2/Huggy/Huggy-1999967.onnx",
"reward": 4.082277990676261,
"creation_time": 1713446518.191606,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999967.pt"
]
},
{
"steps": 2000094,
"file_path": "results/Huggy2/Huggy/Huggy-2000094.onnx",
"reward": 4.125900190127523,
"creation_time": 1713446518.3077471,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000094.pt"
]
}
],
"final_checkpoint": {
"steps": 2000094,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 4.125900190127523,
"creation_time": 1713446518.3077471,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000094.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.2.1+cu121"
}
}