{
"Huggy": {
"checkpoints": [
{
"steps": 199910,
"file_path": "results/Huggy2/Huggy/Huggy-199910.onnx",
"reward": 3.1669903275084823,
"creation_time": 1725557547.9363768,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199910.pt"
]
},
{
"steps": 399971,
"file_path": "results/Huggy2/Huggy/Huggy-399971.onnx",
"reward": 3.79741033911705,
"creation_time": 1725557976.7046006,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399971.pt"
]
},
{
"steps": 599876,
"file_path": "results/Huggy2/Huggy/Huggy-599876.onnx",
"reward": 4.268630915454456,
"creation_time": 1725558417.4318137,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599876.pt"
]
},
{
"steps": 799962,
"file_path": "results/Huggy2/Huggy/Huggy-799962.onnx",
"reward": 3.753656720628544,
"creation_time": 1725558853.4550436,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799962.pt"
]
},
{
"steps": 999949,
"file_path": "results/Huggy2/Huggy/Huggy-999949.onnx",
"reward": 3.9874617404797497,
"creation_time": 1725559301.8474855,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999949.pt"
]
},
{
"steps": 1199972,
"file_path": "results/Huggy2/Huggy/Huggy-1199972.onnx",
"reward": 3.873827345058566,
"creation_time": 1725559740.9637594,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199972.pt"
]
},
{
"steps": 1399679,
"file_path": "results/Huggy2/Huggy/Huggy-1399679.onnx",
"reward": 4.345399345670428,
"creation_time": 1725560185.6063962,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399679.pt"
]
},
{
"steps": 1599964,
"file_path": "results/Huggy2/Huggy/Huggy-1599964.onnx",
"reward": 3.677526204586029,
"creation_time": 1725560602.0500426,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599964.pt"
]
},
{
"steps": 1799994,
"file_path": "results/Huggy2/Huggy/Huggy-1799994.onnx",
"reward": 3.7151085828127486,
"creation_time": 1725561031.2004678,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799994.pt"
]
},
{
"steps": 1999994,
"file_path": "results/Huggy2/Huggy/Huggy-1999994.onnx",
"reward": 3.564417453912588,
"creation_time": 1725561471.6754107,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999994.pt"
]
},
{
"steps": 2000217,
"file_path": "results/Huggy2/Huggy/Huggy-2000217.onnx",
"reward": 3.698956330617269,
"creation_time": 1725561471.7897723,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000217.pt"
]
}
],
"final_checkpoint": {
"steps": 2000217,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.698956330617269,
"creation_time": 1725561471.7897723,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000217.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.4.0+cu121"
}
}