{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199829,
        "file_path": "results/Huggy/Huggy/Huggy-199829.onnx",
        "reward": 3.541808925072352,
        "creation_time": 1702007432.2955012,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-199829.pt"
        ]
      },
      {
        "steps": 399952,
        "file_path": "results/Huggy/Huggy/Huggy-399952.onnx",
        "reward": 3.667097849004409,
        "creation_time": 1702007667.8763874,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-399952.pt"
        ]
      },
      {
        "steps": 599982,
        "file_path": "results/Huggy/Huggy/Huggy-599982.onnx",
        "reward": 3.842747859954834,
        "creation_time": 1702007906.902512,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-599982.pt"
        ]
      },
      {
        "steps": 799998,
        "file_path": "results/Huggy/Huggy/Huggy-799998.onnx",
        "reward": 3.6393764371774635,
        "creation_time": 1702008146.0726988,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-799998.pt"
        ]
      },
      {
        "steps": 999994,
        "file_path": "results/Huggy/Huggy/Huggy-999994.onnx",
        "reward": 4.138070845887775,
        "creation_time": 1702008385.4080105,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-999994.pt"
        ]
      },
      {
        "steps": 1199906,
        "file_path": "results/Huggy/Huggy/Huggy-1199906.onnx",
        "reward": 4.012117649702465,
        "creation_time": 1702008634.3859901,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1199906.pt"
        ]
      },
      {
        "steps": 1399978,
        "file_path": "results/Huggy/Huggy/Huggy-1399978.onnx",
        "reward": 4.740994483232498,
        "creation_time": 1702008890.1823084,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1399978.pt"
        ]
      },
      {
        "steps": 1599954,
        "file_path": "results/Huggy/Huggy/Huggy-1599954.onnx",
        "reward": 3.823324697179944,
        "creation_time": 1702009142.7029169,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1599954.pt"
        ]
      },
      {
        "steps": 1799405,
        "file_path": "results/Huggy/Huggy/Huggy-1799405.onnx",
        "reward": 3.8209002401869174,
        "creation_time": 1702009397.9768107,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1799405.pt"
        ]
      },
      {
        "steps": 1999992,
        "file_path": "results/Huggy/Huggy/Huggy-1999992.onnx",
        "reward": 3.6806785100036197,
        "creation_time": 1702009652.016916,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1999992.pt"
        ]
      },
      {
        "steps": 2000060,
        "file_path": "results/Huggy/Huggy/Huggy-2000060.onnx",
        "reward": 3.6613688050089657,
        "creation_time": 1702009652.1252277,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-2000060.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000060,
      "file_path": "results/Huggy/Huggy.onnx",
      "reward": 3.6613688050089657,
      "creation_time": 1702009652.1252277,
      "auxillary_file_paths": [
        "results/Huggy/Huggy/Huggy-2000060.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "1.1.0.dev0",
    "torch_version": "2.1.1+cu121"
  }
}