ppo-Huggy / run_logs / training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199966,
                "file_path": "results/Huggy/Huggy/Huggy-199966.onnx",
                "reward": 3.2436068208911752,
                "creation_time": 1693406455.939619,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199966.pt"
                ]
            },
            {
                "steps": 399989,
                "file_path": "results/Huggy/Huggy/Huggy-399989.onnx",
                "reward": 3.602521750834081,
                "creation_time": 1693406708.3317928,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399989.pt"
                ]
            },
            {
                "steps": 599601,
                "file_path": "results/Huggy/Huggy/Huggy-599601.onnx",
                "reward": 4.202232720273914,
                "creation_time": 1693406960.6422532,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599601.pt"
                ]
            },
            {
                "steps": 799921,
                "file_path": "results/Huggy/Huggy/Huggy-799921.onnx",
                "reward": 4.030739052002656,
                "creation_time": 1693407206.4353747,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799921.pt"
                ]
            },
            {
                "steps": 999969,
                "file_path": "results/Huggy/Huggy/Huggy-999969.onnx",
                "reward": 3.7576427834588775,
                "creation_time": 1693407457.0589201,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999969.pt"
                ]
            },
            {
                "steps": 1199998,
                "file_path": "results/Huggy/Huggy/Huggy-1199998.onnx",
                "reward": 3.8149892614812266,
                "creation_time": 1693407708.785573,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199998.pt"
                ]
            },
            {
                "steps": 1399951,
                "file_path": "results/Huggy/Huggy/Huggy-1399951.onnx",
                "reward": 4.1959088416326615,
                "creation_time": 1693407965.3299007,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399951.pt"
                ]
            },
            {
                "steps": 1599741,
                "file_path": "results/Huggy/Huggy/Huggy-1599741.onnx",
                "reward": 3.560789604374415,
                "creation_time": 1693408219.3194988,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599741.pt"
                ]
            },
            {
                "steps": 1799969,
                "file_path": "results/Huggy/Huggy/Huggy-1799969.onnx",
                "reward": 3.9567004552909304,
                "creation_time": 1693408469.2376137,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799969.pt"
                ]
            },
            {
                "steps": 1999964,
                "file_path": "results/Huggy/Huggy/Huggy-1999964.onnx",
                "reward": 3.941484544728253,
                "creation_time": 1693408724.4258304,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999964.pt"
                ]
            },
            {
                "steps": 2000055,
                "file_path": "results/Huggy/Huggy/Huggy-2000055.onnx",
                "reward": 3.9580375639597576,
                "creation_time": 1693408724.6163294,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000055.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000055,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.9580375639597576,
            "creation_time": 1693408724.6163294,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000055.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}
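A minimal sketch of how one might inspect this file, assuming it is read from run_logs/training_status.json as in the path above. It relies only on the standard json module and the field names visible in the file: the top-level "metadata" key is skipped, and every other top-level key is treated as a behavior name (here "Huggy") whose value holds the "checkpoints" list and the "final_checkpoint" record. (Note that "auxillary_file_paths" is spelled that way in the file itself, so the code keys on it verbatim.)

import json

# Path assumed from the repository layout above; adjust if the file
# lives elsewhere.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

for behavior, data in status.items():
    # The "metadata" entry describes versions, not a trained behavior.
    if behavior == "metadata":
        continue
    print(f"behavior: {behavior}")
    # Each checkpoint records the training step, mean reward at save
    # time, the exported .onnx policy, and any auxiliary files.
    for ckpt in data["checkpoints"]:
        print(f"  {ckpt['steps']:>8} steps  reward {ckpt['reward']:.3f}  {ckpt['file_path']}")
    final = data["final_checkpoint"]
    print(f"  final: {final['steps']} steps  reward {final['reward']:.3f}  {final['file_path']}")

Run against this file, the loop prints one line per checkpoint (eleven here, saved roughly every 200k steps up to 2000055) followed by the final exported policy results/Huggy/Huggy.onnx.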