ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199774,
                "file_path": "results/Huggy2/Huggy/Huggy-199774.onnx",
                "reward": 3.419822163269168,
                "creation_time": 1714073886.7461698,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-199774.pt"
                ]
            },
            {
                "steps": 399988,
                "file_path": "results/Huggy2/Huggy/Huggy-399988.onnx",
                "reward": 3.5733941061930223,
                "creation_time": 1714074145.0853026,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-399988.pt"
                ]
            },
            {
                "steps": 599580,
                "file_path": "results/Huggy2/Huggy/Huggy-599580.onnx",
                "reward": 4.3217428828540605,
                "creation_time": 1714074402.49548,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-599580.pt"
                ]
            },
            {
                "steps": 799846,
                "file_path": "results/Huggy2/Huggy/Huggy-799846.onnx",
                "reward": 3.833383857857349,
                "creation_time": 1714074655.4827416,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-799846.pt"
                ]
            },
            {
                "steps": 999272,
                "file_path": "results/Huggy2/Huggy/Huggy-999272.onnx",
                "reward": 3.775039091706276,
                "creation_time": 1714074915.3131223,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-999272.pt"
                ]
            },
            {
                "steps": 1199957,
                "file_path": "results/Huggy2/Huggy/Huggy-1199957.onnx",
                "reward": 3.6758741239706674,
                "creation_time": 1714075173.5577579,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1199957.pt"
                ]
            },
            {
                "steps": 1399958,
                "file_path": "results/Huggy2/Huggy/Huggy-1399958.onnx",
                "reward": 3.526920336919526,
                "creation_time": 1714075428.9723713,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1399958.pt"
                ]
            },
            {
                "steps": 1599997,
                "file_path": "results/Huggy2/Huggy/Huggy-1599997.onnx",
                "reward": 4.0847567430427,
                "creation_time": 1714075688.5061297,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1599997.pt"
                ]
            },
            {
                "steps": 1799356,
                "file_path": "results/Huggy2/Huggy/Huggy-1799356.onnx",
                "reward": 3.38430500572378,
                "creation_time": 1714075950.6438396,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1799356.pt"
                ]
            },
            {
                "steps": 1999992,
                "file_path": "results/Huggy2/Huggy/Huggy-1999992.onnx",
                "reward": 3.7369117149755584,
                "creation_time": 1714076208.9361374,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1999992.pt"
                ]
            },
            {
                "steps": 2000742,
                "file_path": "results/Huggy2/Huggy/Huggy-2000742.onnx",
                "reward": 3.7023114335536955,
                "creation_time": 1714076209.0836964,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-2000742.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000742,
            "file_path": "results/Huggy2/Huggy.onnx",
            "reward": 3.7023114335536955,
            "creation_time": 1714076209.0836964,
            "auxillary_file_paths": [
                "results/Huggy2/Huggy/Huggy-2000742.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "1.1.0.dev0",
        "torch_version": "2.2.1+cu121"
    }
}
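
For reference, the snippet below is a minimal Python sketch of how a training_status.json like this one can be inspected: it loads the file, prints each checkpoint's step count and mean reward, and picks out the checkpoint with the highest recorded reward. The file path used here is an assumption based on the repository layout above; adjust it to your own run_logs directory.

import json

# Path is an assumption; point it at your own run_logs directory.
with open("ppo-Huggy/run_logs/training_status.json") as f:
    status = json.load(f)

checkpoints = status["Huggy"]["checkpoints"]

# List each saved checkpoint with its step count and mean reward.
for ckpt in checkpoints:
    print(f"{ckpt['steps']:>8} steps  reward {ckpt['reward']:.3f}  {ckpt['file_path']}")

# Checkpoint with the highest recorded mean reward.
best = max(checkpoints, key=lambda c: c["reward"])
print("best:", best["file_path"], round(best["reward"], 3))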