{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199952,
        "file_path": "results/Huggy2/Huggy/Huggy-199952.onnx",
        "reward": 3.315463025167764,
        "creation_time": 1725202035.8863235,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-199952.pt"
        ]
      },
      {
        "steps": 399966,
        "file_path": "results/Huggy2/Huggy/Huggy-399966.onnx",
        "reward": 3.49128394684893,
        "creation_time": 1725202512.359278,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-399966.pt"
        ]
      },
      {
        "steps": 599972,
        "file_path": "results/Huggy2/Huggy/Huggy-599972.onnx",
        "reward": 3.121417760848999,
        "creation_time": 1725203005.4570835,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-599972.pt"
        ]
      },
      {
        "steps": 799939,
        "file_path": "results/Huggy2/Huggy/Huggy-799939.onnx",
        "reward": 3.4720369936788784,
        "creation_time": 1725203477.4064581,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-799939.pt"
        ]
      },
      {
        "steps": 999970,
        "file_path": "results/Huggy2/Huggy/Huggy-999970.onnx",
        "reward": 3.759049678390676,
        "creation_time": 1725203962.978513,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-999970.pt"
        ]
      },
      {
        "steps": 1199995,
        "file_path": "results/Huggy2/Huggy/Huggy-1199995.onnx",
        "reward": 3.4867587208747866,
        "creation_time": 1725204449.068049,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1199995.pt"
        ]
      },
      {
        "steps": 1399638,
        "file_path": "results/Huggy2/Huggy/Huggy-1399638.onnx",
        "reward": 3.7110532071701314,
        "creation_time": 1725204921.3876188,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1399638.pt"
        ]
      },
      {
        "steps": 1599929,
        "file_path": "results/Huggy2/Huggy/Huggy-1599929.onnx",
        "reward": 3.4119418984965275,
        "creation_time": 1725205403.3349817,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1599929.pt"
        ]
      },
      {
        "steps": 1799977,
        "file_path": "results/Huggy2/Huggy/Huggy-1799977.onnx",
        "reward": 3.444930943949469,
        "creation_time": 1725205882.0211587,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1799977.pt"
        ]
      },
      {
        "steps": 1999593,
        "file_path": "results/Huggy2/Huggy/Huggy-1999593.onnx",
        "reward": 3.543607249165213,
        "creation_time": 1725206345.1361537,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1999593.pt"
        ]
      },
      {
        "steps": 2000343,
        "file_path": "results/Huggy2/Huggy/Huggy-2000343.onnx",
        "reward": 3.4991420146666075,
        "creation_time": 1725206345.3634958,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-2000343.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000343,
      "file_path": "results/Huggy2/Huggy.onnx",
      "reward": 3.4991420146666075,
      "creation_time": 1725206345.3634958,
      "auxillary_file_paths": [
        "results/Huggy2/Huggy/Huggy-2000343.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "1.1.0.dev0",
    "torch_version": "2.4.0+cu121"
  }
}