ppo-huggy/run_logs/training_status.json
{
"Huggy": {
"checkpoints": [
{
"steps": 199867,
"file_path": "results/Huggy/Huggy/Huggy-199867.onnx",
"reward": 3.4894721747992876,
"creation_time": 1674035077.5849078,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199867.pt"
]
},
{
"steps": 399908,
"file_path": "results/Huggy/Huggy/Huggy-399908.onnx",
"reward": 3.8147015197802396,
"creation_time": 1674035294.1145055,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399908.pt"
]
},
{
"steps": 599988,
"file_path": "results/Huggy/Huggy/Huggy-599988.onnx",
"reward": 4.052824581662814,
"creation_time": 1674035514.2603154,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599988.pt"
]
},
{
"steps": 799995,
"file_path": "results/Huggy/Huggy/Huggy-799995.onnx",
"reward": 3.7477628609590363,
"creation_time": 1674035730.4748623,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799995.pt"
]
},
{
"steps": 999914,
"file_path": "results/Huggy/Huggy/Huggy-999914.onnx",
"reward": 3.996601617998547,
"creation_time": 1674035951.9805753,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999914.pt"
]
},
{
"steps": 1199932,
"file_path": "results/Huggy/Huggy/Huggy-1199932.onnx",
"reward": 3.8573938970980435,
"creation_time": 1674036173.1443317,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199932.pt"
]
},
{
"steps": 1399950,
"file_path": "results/Huggy/Huggy/Huggy-1399950.onnx",
"reward": null,
"creation_time": 1674036394.4689267,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399950.pt"
]
},
{
"steps": 1599937,
"file_path": "results/Huggy/Huggy/Huggy-1599937.onnx",
"reward": 3.3759322437016586,
"creation_time": 1674036612.4451277,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599937.pt"
]
},
{
"steps": 1799967,
"file_path": "results/Huggy/Huggy/Huggy-1799967.onnx",
"reward": 3.7912767468528314,
"creation_time": 1674036831.3172972,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799967.pt"
]
},
{
"steps": 1999944,
"file_path": "results/Huggy/Huggy/Huggy-1999944.onnx",
"reward": 3.7639129360516868,
"creation_time": 1674037050.4846268,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999944.pt"
]
},
{
"steps": 2000022,
"file_path": "results/Huggy/Huggy/Huggy-2000022.onnx",
"reward": 3.7437008160811205,
"creation_time": 1674037050.6346717,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000022.pt"
]
}
],
"final_checkpoint": {
"steps": 2000022,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.7437008160811205,
"creation_time": 1674037050.6346717,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000022.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}
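
The file above is the checkpoint manifest that ML-Agents writes as it trains: one entry per saved checkpoint (step count, exported `.onnx` policy, mean reward, timestamp, and the auxiliary `.pt` file), plus a `final_checkpoint` and version metadata. A minimal sketch of inspecting it with Python's standard `json` module follows; the relative path `run_logs/training_status.json` is an assumption taken from the page title, and the `"Huggy"` key matches the top-level behavior name in the file. Note that `reward` can be `null` (as in the 1399950-step entry), so the reader guards for `None`.

```python
import json

# Assumed location of this manifest; adjust to wherever the file lives.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

# Each behavior name ("Huggy" here) maps to its checkpoint history.
for checkpoint in status["Huggy"]["checkpoints"]:
    reward = checkpoint["reward"]
    # reward is null in the log when no value was recorded for that save.
    reward_str = f"{reward:.3f}" if reward is not None else "n/a"
    print(f'{checkpoint["steps"]:>8} steps  reward={reward_str}  '
          f'{checkpoint["file_path"]}')

final = status["Huggy"]["final_checkpoint"]
print(f'final: {final["steps"]} steps -> {final["file_path"]}')
```

Run against this file, the loop prints the eleven checkpoints in order (199867 through 2000022 steps) followed by the final exported model, `results/Huggy/Huggy.onnx`.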