{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199732,
        "file_path": "results/Huggy2/Huggy/Huggy-199732.onnx",
        "reward": 3.503874786333604,
        "creation_time": 1708350221.4309108,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-199732.pt"
        ]
      },
      {
        "steps": 399934,
        "file_path": "results/Huggy2/Huggy/Huggy-399934.onnx",
        "reward": 3.825198947437226,
        "creation_time": 1708350472.7337434,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-399934.pt"
        ]
      },
      {
        "steps": 599891,
        "file_path": "results/Huggy2/Huggy/Huggy-599891.onnx",
        "reward": 3.7735334701008267,
        "creation_time": 1708350727.811124,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-599891.pt"
        ]
      },
      {
        "steps": 799969,
        "file_path": "results/Huggy2/Huggy/Huggy-799969.onnx",
        "reward": 3.625470498282658,
        "creation_time": 1708350980.423521,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-799969.pt"
        ]
      },
      {
        "steps": 999956,
        "file_path": "results/Huggy2/Huggy/Huggy-999956.onnx",
        "reward": 3.490060005159605,
        "creation_time": 1708351230.5204523,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-999956.pt"
        ]
      },
      {
        "steps": 1199965,
        "file_path": "results/Huggy2/Huggy/Huggy-1199965.onnx",
        "reward": 3.3469055851300555,
        "creation_time": 1708351479.017348,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1199965.pt"
        ]
      },
      {
        "steps": 1399905,
        "file_path": "results/Huggy2/Huggy/Huggy-1399905.onnx",
        "reward": 3.7726635014273455,
        "creation_time": 1708351731.5217693,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1399905.pt"
        ]
      },
      {
        "steps": 1599898,
        "file_path": "results/Huggy2/Huggy/Huggy-1599898.onnx",
        "reward": 3.495565148424511,
        "creation_time": 1708351984.4260368,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1599898.pt"
        ]
      },
      {
        "steps": 1799942,
        "file_path": "results/Huggy2/Huggy/Huggy-1799942.onnx",
        "reward": 4.263876909679837,
        "creation_time": 1708352237.2062006,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1799942.pt"
        ]
      },
      {
        "steps": 1999392,
        "file_path": "results/Huggy2/Huggy/Huggy-1999392.onnx",
        "reward": 3.594919475966585,
        "creation_time": 1708352483.5263805,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1999392.pt"
        ]
      },
      {
        "steps": 2000142,
        "file_path": "results/Huggy2/Huggy/Huggy-2000142.onnx",
        "reward": 3.5565502984183177,
        "creation_time": 1708352483.7438662,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-2000142.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000142,
      "file_path": "results/Huggy2/Huggy.onnx",
      "reward": 3.5565502984183177,
      "creation_time": 1708352483.7438662,
      "auxillary_file_paths": [
        "results/Huggy2/Huggy/Huggy-2000142.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "1.1.0.dev0",
    "torch_version": "2.2.0+cu121"
  }
}