{
"Huggy": {
"checkpoints": [
{
"steps": 199990,
"file_path": "results/Huggy2/Huggy/Huggy-199990.onnx",
"reward": 3.39762633525092,
"creation_time": 1710781126.630411,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199990.pt"
]
},
{
"steps": 399924,
"file_path": "results/Huggy2/Huggy/Huggy-399924.onnx",
"reward": 3.761957597164881,
"creation_time": 1710781351.0347087,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399924.pt"
]
},
{
"steps": 599952,
"file_path": "results/Huggy2/Huggy/Huggy-599952.onnx",
"reward": 4.453476162517772,
"creation_time": 1710781578.1404314,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599952.pt"
]
},
{
"steps": 799967,
"file_path": "results/Huggy2/Huggy/Huggy-799967.onnx",
"reward": 3.7728568385808896,
"creation_time": 1710781805.573369,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799967.pt"
]
},
{
"steps": 999996,
"file_path": "results/Huggy2/Huggy/Huggy-999996.onnx",
"reward": 3.778133083509919,
"creation_time": 1710782038.2757783,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999996.pt"
]
},
{
"steps": 1199917,
"file_path": "results/Huggy2/Huggy/Huggy-1199917.onnx",
"reward": 3.7655354331179365,
"creation_time": 1710782271.8507307,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199917.pt"
]
},
{
"steps": 1399999,
"file_path": "results/Huggy2/Huggy/Huggy-1399999.onnx",
"reward": 3.4827212413152058,
"creation_time": 1710782505.8372777,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399999.pt"
]
},
{
"steps": 1599949,
"file_path": "results/Huggy2/Huggy/Huggy-1599949.onnx",
"reward": 3.6588941461222184,
"creation_time": 1710782739.4450674,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599949.pt"
]
},
{
"steps": 1799918,
"file_path": "results/Huggy2/Huggy/Huggy-1799918.onnx",
"reward": 3.6698566675186157,
"creation_time": 1710782980.6693594,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799918.pt"
]
},
{
"steps": 1999928,
"file_path": "results/Huggy2/Huggy/Huggy-1999928.onnx",
"reward": 3.640997742995238,
"creation_time": 1710783222.910149,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999928.pt"
]
},
{
"steps": 2000043,
"file_path": "results/Huggy2/Huggy/Huggy-2000043.onnx",
"reward": 3.7019227534532546,
"creation_time": 1710783223.0307035,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000043.pt"
]
}
],
"final_checkpoint": {
"steps": 2000043,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.7019227534532546,
"creation_time": 1710783223.0307035,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000043.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.2.1+cu121"
}
}