{
"Huggy": {
"checkpoints": [
{
"steps": 199923,
"file_path": "results/Huggy/Huggy/Huggy-199923.onnx",
"reward": 3.333000889567078,
"creation_time": 1696372607.411563,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199923.pt"
]
},
{
"steps": 399975,
"file_path": "results/Huggy/Huggy/Huggy-399975.onnx",
"reward": 3.5710721109594616,
"creation_time": 1696372872.9442825,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399975.pt"
]
},
{
"steps": 599918,
"file_path": "results/Huggy/Huggy/Huggy-599918.onnx",
"reward": 3.763862485470979,
"creation_time": 1696373140.1081247,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599918.pt"
]
},
{
"steps": 799916,
"file_path": "results/Huggy/Huggy/Huggy-799916.onnx",
"reward": 4.046138451055244,
"creation_time": 1696373409.0392067,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799916.pt"
]
},
{
"steps": 999934,
"file_path": "results/Huggy/Huggy/Huggy-999934.onnx",
"reward": 3.8888021540050666,
"creation_time": 1696373682.1701727,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999934.pt"
]
},
{
"steps": 1199911,
"file_path": "results/Huggy/Huggy/Huggy-1199911.onnx",
"reward": 3.664740467071533,
"creation_time": 1696373955.4775069,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199911.pt"
]
},
{
"steps": 1399906,
"file_path": "results/Huggy/Huggy/Huggy-1399906.onnx",
"reward": null,
"creation_time": 1696374236.0279622,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399906.pt"
]
},
{
"steps": 1599957,
"file_path": "results/Huggy/Huggy/Huggy-1599957.onnx",
"reward": 3.9728812785346275,
"creation_time": 1696374526.0730348,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599957.pt"
]
},
{
"steps": 1799922,
"file_path": "results/Huggy/Huggy/Huggy-1799922.onnx",
"reward": 4.079513421425452,
"creation_time": 1696374809.1871288,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799922.pt"
]
},
{
"steps": 1999550,
"file_path": "results/Huggy/Huggy/Huggy-1999550.onnx",
"reward": 4.329372820754846,
"creation_time": 1696375075.9511454,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999550.pt"
]
},
{
"steps": 2000300,
"file_path": "results/Huggy/Huggy/Huggy-2000300.onnx",
"reward": 4.176403427610592,
"creation_time": 1696375076.1883142,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000300.pt"
]
}
],
"final_checkpoint": {
"steps": 2000300,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.176403427610592,
"creation_time": 1696375076.1883142,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000300.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}