{
"Huggy": {
"checkpoints": [
{
"steps": 199884,
"file_path": "results/Huggy2/Huggy/Huggy-199884.onnx",
"reward": 3.394341653212905,
"creation_time": 1713350215.2815857,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199884.pt"
]
},
{
"steps": 399915,
"file_path": "results/Huggy2/Huggy/Huggy-399915.onnx",
"reward": 3.503887174458339,
"creation_time": 1713350444.4386172,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399915.pt"
]
},
{
"steps": 599933,
"file_path": "results/Huggy2/Huggy/Huggy-599933.onnx",
"reward": 3.5455414758009067,
"creation_time": 1713350677.3896723,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599933.pt"
]
},
{
"steps": 799745,
"file_path": "results/Huggy2/Huggy/Huggy-799745.onnx",
"reward": 3.8255964750014013,
"creation_time": 1713350906.1963804,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799745.pt"
]
},
{
"steps": 999976,
"file_path": "results/Huggy2/Huggy/Huggy-999976.onnx",
"reward": 3.741859982653362,
"creation_time": 1713351140.2979167,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999976.pt"
]
},
{
"steps": 1199975,
"file_path": "results/Huggy2/Huggy/Huggy-1199975.onnx",
"reward": 4.021582448319213,
"creation_time": 1713351377.9149208,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199975.pt"
]
},
{
"steps": 1399994,
"file_path": "results/Huggy2/Huggy/Huggy-1399994.onnx",
"reward": 3.659040158120995,
"creation_time": 1713351610.1535134,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399994.pt"
]
},
{
"steps": 1599876,
"file_path": "results/Huggy2/Huggy/Huggy-1599876.onnx",
"reward": 3.7806322162234505,
"creation_time": 1713351849.9307055,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599876.pt"
]
},
{
"steps": 1799925,
"file_path": "results/Huggy2/Huggy/Huggy-1799925.onnx",
"reward": 3.9895857547720275,
"creation_time": 1713352089.820501,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799925.pt"
]
},
{
"steps": 1999950,
"file_path": "results/Huggy2/Huggy/Huggy-1999950.onnx",
"reward": 4.235251143574715,
"creation_time": 1713352325.4262657,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999950.pt"
]
},
{
"steps": 2000016,
"file_path": "results/Huggy2/Huggy/Huggy-2000016.onnx",
"reward": 4.231370911878698,
"creation_time": 1713352325.5559487,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000016.pt"
]
}
],
"final_checkpoint": {
"steps": 2000016,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 4.231370911878698,
"creation_time": 1713352325.5559487,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000016.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.2.1+cu121"
}
}