{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199938,
        "file_path": "results/Huggy/Huggy/Huggy-199938.onnx",
        "reward": 2.9699934826177707,
        "creation_time": 1699872626.603245,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-199938.pt"
        ]
      },
      {
        "steps": 399897,
        "file_path": "results/Huggy/Huggy/Huggy-399897.onnx",
        "reward": 3.784656543580313,
        "creation_time": 1699872868.4206789,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-399897.pt"
        ]
      },
      {
        "steps": 599865,
        "file_path": "results/Huggy/Huggy/Huggy-599865.onnx",
        "reward": 3.3722850960843704,
        "creation_time": 1699873109.6940293,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-599865.pt"
        ]
      },
      {
        "steps": 799913,
        "file_path": "results/Huggy/Huggy/Huggy-799913.onnx",
        "reward": 3.9484246895426796,
        "creation_time": 1699873351.6495988,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-799913.pt"
        ]
      },
      {
        "steps": 999941,
        "file_path": "results/Huggy/Huggy/Huggy-999941.onnx",
        "reward": 3.8373227886957664,
        "creation_time": 1699873596.3837771,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-999941.pt"
        ]
      },
      {
        "steps": 1199880,
        "file_path": "results/Huggy/Huggy/Huggy-1199880.onnx",
        "reward": 3.662098710601394,
        "creation_time": 1699873842.6125116,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1199880.pt"
        ]
      },
      {
        "steps": 1399987,
        "file_path": "results/Huggy/Huggy/Huggy-1399987.onnx",
        "reward": 4.130171571459089,
        "creation_time": 1699874086.5614934,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1399987.pt"
        ]
      },
      {
        "steps": 1599969,
        "file_path": "results/Huggy/Huggy/Huggy-1599969.onnx",
        "reward": 3.7461708298949308,
        "creation_time": 1699874324.0469055,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1599969.pt"
        ]
      },
      {
        "steps": 1799841,
        "file_path": "results/Huggy/Huggy/Huggy-1799841.onnx",
        "reward": 3.7815519668098188,
        "creation_time": 1699874558.399145,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1799841.pt"
        ]
      },
      {
        "steps": 1999984,
        "file_path": "results/Huggy/Huggy/Huggy-1999984.onnx",
        "reward": 4.295433770865202,
        "creation_time": 1699874792.486045,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1999984.pt"
        ]
      },
      {
        "steps": 2000038,
        "file_path": "results/Huggy/Huggy/Huggy-2000038.onnx",
        "reward": 4.264307018482324,
        "creation_time": 1699874792.6405125,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-2000038.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000038,
      "file_path": "results/Huggy/Huggy.onnx",
      "reward": 4.264307018482324,
      "creation_time": 1699874792.6405125,
      "auxillary_file_paths": [
        "results/Huggy/Huggy/Huggy-2000038.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "1.1.0.dev0",
    "torch_version": "2.1.0+cu118"
  }
}