{
"Huggy": {
"checkpoints": [
{
"steps": 199901,
"file_path": "results/Huggy2/Huggy/Huggy-199901.onnx",
"reward": 3.5137154128816395,
"creation_time": 1739341567.873179,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199901.pt"
]
},
{
"steps": 399835,
"file_path": "results/Huggy2/Huggy/Huggy-399835.onnx",
"reward": 3.2668223158787875,
"creation_time": 1739341809.9588556,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399835.pt"
]
},
{
"steps": 599933,
"file_path": "results/Huggy2/Huggy/Huggy-599933.onnx",
"reward": 3.549629290898641,
"creation_time": 1739342056.0736842,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599933.pt"
]
},
{
"steps": 799658,
"file_path": "results/Huggy2/Huggy/Huggy-799658.onnx",
"reward": 3.7574045684857245,
"creation_time": 1739342300.3282053,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799658.pt"
]
},
{
"steps": 999968,
"file_path": "results/Huggy2/Huggy/Huggy-999968.onnx",
"reward": 3.8430258707474856,
"creation_time": 1739342547.8319242,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999968.pt"
]
},
{
"steps": 1199955,
"file_path": "results/Huggy2/Huggy/Huggy-1199955.onnx",
"reward": 3.519961876253928,
"creation_time": 1739342796.8412027,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199955.pt"
]
},
{
"steps": 1399971,
"file_path": "results/Huggy2/Huggy/Huggy-1399971.onnx",
"reward": 3.786273362806865,
"creation_time": 1739343041.9545205,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399971.pt"
]
},
{
"steps": 1599947,
"file_path": "results/Huggy2/Huggy/Huggy-1599947.onnx",
"reward": 3.9130476441845965,
"creation_time": 1739343287.4637904,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599947.pt"
]
},
{
"steps": 1799990,
"file_path": "results/Huggy2/Huggy/Huggy-1799990.onnx",
"reward": 3.508977418076502,
"creation_time": 1739343533.6478775,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799990.pt"
]
},
{
"steps": 1999679,
"file_path": "results/Huggy2/Huggy/Huggy-1999679.onnx",
"reward": 4.471989086696079,
"creation_time": 1739343779.7271445,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999679.pt"
]
},
{
"steps": 2000429,
"file_path": "results/Huggy2/Huggy/Huggy-2000429.onnx",
"reward": 3.5639549791812897,
"creation_time": 1739343779.8753374,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000429.pt"
]
}
],
"final_checkpoint": {
"steps": 2000429,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.5639549791812897,
"creation_time": 1739343779.8753374,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000429.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.6.0+cu124"
}
}