ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199976,
                "file_path": "results/Huggy2/Huggy/Huggy-199976.onnx",
                "reward": 3.753454156460301,
                "creation_time": 1732632775.060497,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-199976.pt"
                ]
            },
            {
                "steps": 399897,
                "file_path": "results/Huggy2/Huggy/Huggy-399897.onnx",
                "reward": 3.844302045155878,
                "creation_time": 1732633046.5572073,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-399897.pt"
                ]
            },
            {
                "steps": 599926,
                "file_path": "results/Huggy2/Huggy/Huggy-599926.onnx",
                "reward": 4.454173097610473,
                "creation_time": 1732633319.9299343,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-599926.pt"
                ]
            },
            {
                "steps": 799968,
                "file_path": "results/Huggy2/Huggy/Huggy-799968.onnx",
                "reward": 3.9349432662881987,
                "creation_time": 1732633596.6525478,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-799968.pt"
                ]
            },
            {
                "steps": 999910,
                "file_path": "results/Huggy2/Huggy/Huggy-999910.onnx",
                "reward": 3.953412116450422,
                "creation_time": 1732633879.3150246,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-999910.pt"
                ]
            },
            {
                "steps": 1199926,
                "file_path": "results/Huggy2/Huggy/Huggy-1199926.onnx",
                "reward": 3.7434336076944303,
                "creation_time": 1732634164.2866962,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1199926.pt"
                ]
            },
            {
                "steps": 1399862,
                "file_path": "results/Huggy2/Huggy/Huggy-1399862.onnx",
                "reward": 3.9817099826676503,
                "creation_time": 1732634455.528902,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1399862.pt"
                ]
            },
            {
                "steps": 1599261,
                "file_path": "results/Huggy2/Huggy/Huggy-1599261.onnx",
                "reward": 3.622300325978732,
                "creation_time": 1732634732.3003237,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1599261.pt"
                ]
            },
            {
                "steps": 1799881,
                "file_path": "results/Huggy2/Huggy/Huggy-1799881.onnx",
                "reward": 3.4993496189514794,
                "creation_time": 1732635012.2632797,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1799881.pt"
                ]
            },
            {
                "steps": 1999980,
                "file_path": "results/Huggy2/Huggy/Huggy-1999980.onnx",
                "reward": 3.5276609296384067,
                "creation_time": 1732635293.263952,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1999980.pt"
                ]
            },
            {
                "steps": 2000073,
                "file_path": "results/Huggy2/Huggy/Huggy-2000073.onnx",
                "reward": 3.5985812743504844,
                "creation_time": 1732635293.3974922,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-2000073.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000073,
            "file_path": "results/Huggy2/Huggy.onnx",
            "reward": 3.5985812743504844,
            "creation_time": 1732635293.3974922,
            "auxillary_file_paths": [
                "results/Huggy2/Huggy/Huggy-2000073.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "1.2.0.dev0",
        "torch_version": "2.5.1+cu121"
    }
}