{
"Huggy": {
"checkpoints": [
{
"steps": 199779,
"file_path": "results/Huggy2/Huggy/Huggy-199779.onnx",
"reward": 3.4720515275001524,
"creation_time": 1731096704.2380702,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199779.pt"
]
},
{
"steps": 399924,
"file_path": "results/Huggy2/Huggy/Huggy-399924.onnx",
"reward": 3.5101620393494763,
"creation_time": 1731096942.2945316,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399924.pt"
]
},
{
"steps": 599926,
"file_path": "results/Huggy2/Huggy/Huggy-599926.onnx",
"reward": 3.39479852716128,
"creation_time": 1731097181.6620939,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599926.pt"
]
},
{
"steps": 799967,
"file_path": "results/Huggy2/Huggy/Huggy-799967.onnx",
"reward": 3.8269828348829034,
"creation_time": 1731097421.466843,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799967.pt"
]
},
{
"steps": 999955,
"file_path": "results/Huggy2/Huggy/Huggy-999955.onnx",
"reward": 3.871831700757698,
"creation_time": 1731097665.8269312,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999955.pt"
]
},
{
"steps": 1199953,
"file_path": "results/Huggy2/Huggy/Huggy-1199953.onnx",
"reward": 3.253111954142408,
"creation_time": 1731097910.092432,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199953.pt"
]
},
{
"steps": 1399991,
"file_path": "results/Huggy2/Huggy/Huggy-1399991.onnx",
"reward": 3.6264347568154336,
"creation_time": 1731098152.2945619,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399991.pt"
]
},
{
"steps": 1599609,
"file_path": "results/Huggy2/Huggy/Huggy-1599609.onnx",
"reward": 3.739722862730931,
"creation_time": 1731098392.5119438,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599609.pt"
]
},
{
"steps": 1799935,
"file_path": "results/Huggy2/Huggy/Huggy-1799935.onnx",
"reward": 3.76418987032655,
"creation_time": 1731098633.215934,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799935.pt"
]
},
{
"steps": 1999911,
"file_path": "results/Huggy2/Huggy/Huggy-1999911.onnx",
"reward": 4.34602137406667,
"creation_time": 1731098874.7079213,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999911.pt"
]
},
{
"steps": 2000040,
"file_path": "results/Huggy2/Huggy/Huggy-2000040.onnx",
"reward": 4.562111413478851,
"creation_time": 1731098874.8219707,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000040.pt"
]
}
],
"final_checkpoint": {
"steps": 2000040,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 4.562111413478851,
"creation_time": 1731098874.8219707,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000040.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.5.0+cu121"
}
}