{
"Huggy": {
"checkpoints": [
{
"steps": 199845,
"file_path": "results/Huggy/Huggy/Huggy-199845.onnx",
"reward": 3.5025850040838122,
"creation_time": 1671312096.7021084,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199845.pt"
]
},
{
"steps": 399880,
"file_path": "results/Huggy/Huggy/Huggy-399880.onnx",
"reward": 3.751641409519391,
"creation_time": 1671312322.1015081,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399880.pt"
]
},
{
"steps": 599887,
"file_path": "results/Huggy/Huggy/Huggy-599887.onnx",
"reward": 3.9002663918903897,
"creation_time": 1671312549.9019952,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599887.pt"
]
},
{
"steps": 799996,
"file_path": "results/Huggy/Huggy/Huggy-799996.onnx",
"reward": 3.8688826022301126,
"creation_time": 1671312776.4071853,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799996.pt"
]
},
{
"steps": 999955,
"file_path": "results/Huggy/Huggy/Huggy-999955.onnx",
"reward": 3.8215244169746128,
"creation_time": 1671313004.6828282,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999955.pt"
]
},
{
"steps": 1199939,
"file_path": "results/Huggy/Huggy/Huggy-1199939.onnx",
"reward": 3.7357666575246387,
"creation_time": 1671313233.9304113,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199939.pt"
]
},
{
"steps": 1399813,
"file_path": "results/Huggy/Huggy/Huggy-1399813.onnx",
"reward": 3.8663109172222225,
"creation_time": 1671313460.8970602,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399813.pt"
]
},
{
"steps": 1599954,
"file_path": "results/Huggy/Huggy/Huggy-1599954.onnx",
"reward": 3.5117324897030877,
"creation_time": 1671313693.4511197,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599954.pt"
]
},
{
"steps": 1799969,
"file_path": "results/Huggy/Huggy/Huggy-1799969.onnx",
"reward": 3.7447150789418266,
"creation_time": 1671313928.0129879,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799969.pt"
]
},
{
"steps": 1999732,
"file_path": "results/Huggy/Huggy/Huggy-1999732.onnx",
"reward": 3.701723274730501,
"creation_time": 1671314158.5675366,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999732.pt"
]
},
{
"steps": 2000482,
"file_path": "results/Huggy/Huggy/Huggy-2000482.onnx",
"reward": 3.3764818093993445,
"creation_time": 1671314158.7172463,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000482.pt"
]
}
],
"final_checkpoint": {
"steps": 2000482,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.3764818093993445,
"creation_time": 1671314158.7172463,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000482.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}