ppo-Huggy/run_logs/training_status.json
{
"Huggy": {
"checkpoints": [
{
"steps": 199960,
"file_path": "results/Huggy/Huggy/Huggy-199960.onnx",
"reward": 3.2663797736167908,
"creation_time": 1684367643.672561,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199960.pt"
]
},
{
"steps": 399999,
"file_path": "results/Huggy/Huggy/Huggy-399999.onnx",
"reward": 3.5684657858477697,
"creation_time": 1684367899.944299,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399999.pt"
]
},
{
"steps": 599937,
"file_path": "results/Huggy/Huggy/Huggy-599937.onnx",
"reward": 4.200987678766251,
"creation_time": 1684368157.5666788,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599937.pt"
]
},
{
"steps": 799976,
"file_path": "results/Huggy/Huggy/Huggy-799976.onnx",
"reward": 3.818469978169183,
"creation_time": 1684368411.5257432,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799976.pt"
]
},
{
"steps": 999982,
"file_path": "results/Huggy/Huggy/Huggy-999982.onnx",
"reward": 3.8032669122471954,
"creation_time": 1684368671.5745478,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999982.pt"
]
},
{
"steps": 1199976,
"file_path": "results/Huggy/Huggy/Huggy-1199976.onnx",
"reward": 3.7511282451450825,
"creation_time": 1684368931.4716215,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199976.pt"
]
},
{
"steps": 1399961,
"file_path": "results/Huggy/Huggy/Huggy-1399961.onnx",
"reward": 3.631453131963759,
"creation_time": 1684369180.6750984,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399961.pt"
]
},
{
"steps": 1599326,
"file_path": "results/Huggy/Huggy/Huggy-1599326.onnx",
"reward": 3.512463672955831,
"creation_time": 1684369429.714019,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599326.pt"
]
},
{
"steps": 1799881,
"file_path": "results/Huggy/Huggy/Huggy-1799881.onnx",
"reward": 3.2312855660915374,
"creation_time": 1684369667.9555497,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799881.pt"
]
},
{
"steps": 1999480,
"file_path": "results/Huggy/Huggy/Huggy-1999480.onnx",
"reward": 3.34941834144378,
"creation_time": 1684369907.880346,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999480.pt"
]
},
{
"steps": 2000230,
"file_path": "results/Huggy/Huggy/Huggy-2000230.onnx",
"reward": 3.3085508812739195,
"creation_time": 1684369908.0279794,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000230.pt"
]
}
],
"final_checkpoint": {
"steps": 2000230,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.3085508812739195,
"creation_time": 1684369908.0279794,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000230.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}
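
This training_status.json is the checkpoint log that ML-Agents writes under run_logs/ during training: one entry per saved checkpoint (step count, mean reward, exported .onnx path, and the companion .pt file under the upstream-spelled "auxillary_file_paths" key), plus the final checkpoint and version metadata. As a minimal sketch of how one might summarize it, the Python below reads the file and prints the reward recorded at each checkpoint; the relative path is an assumption and may need adjusting to wherever the file actually lives.

    import json

    # Load the checkpoint log; the path here is an assumption.
    with open("run_logs/training_status.json") as f:
        status = json.load(f)

    huggy = status["Huggy"]

    # One line per saved checkpoint: step count, mean reward, exported model path.
    for ckpt in huggy["checkpoints"]:
        print(f'{ckpt["steps"]:>9} steps  reward={ckpt["reward"]:.3f}  {ckpt["file_path"]}')

    # The final checkpoint duplicates the last entry but points at the
    # top-level export (results/Huggy/Huggy.onnx).
    final = huggy["final_checkpoint"]
    print(f'final: {final["steps"]} steps, reward={final["reward"]:.3f}, {final["file_path"]}')

Run against the data above, this would show the reward climbing from about 3.27 at 199,960 steps to a peak of about 4.20 at 599,937 steps, then settling around 3.3 by the final checkpoint at 2,000,230 steps.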