ppo-Huggy / run_logs /training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199905,
                "file_path": "results/Huggy/Huggy/Huggy-199905.onnx",
                "reward": 2.89534878049578,
                "creation_time": 1677960472.8633776,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199905.pt"
                ]
            },
            {
                "steps": 399715,
                "file_path": "results/Huggy/Huggy/Huggy-399715.onnx",
                "reward": 3.6324219173855252,
                "creation_time": 1677960704.993357,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399715.pt"
                ]
            },
            {
                "steps": 599975,
                "file_path": "results/Huggy/Huggy/Huggy-599975.onnx",
                "reward": 3.6228644888976524,
                "creation_time": 1677960933.5377111,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599975.pt"
                ]
            },
            {
                "steps": 799879,
                "file_path": "results/Huggy/Huggy/Huggy-799879.onnx",
                "reward": 3.852702898692481,
                "creation_time": 1677961164.1972413,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799879.pt"
                ]
            },
            {
                "steps": 999982,
                "file_path": "results/Huggy/Huggy/Huggy-999982.onnx",
                "reward": 3.827233744933542,
                "creation_time": 1677961398.5775347,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999982.pt"
                ]
            },
            {
                "steps": 1199999,
                "file_path": "results/Huggy/Huggy/Huggy-1199999.onnx",
                "reward": 3.501168659475983,
                "creation_time": 1677961633.5478115,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199999.pt"
                ]
            },
            {
                "steps": 1399994,
                "file_path": "results/Huggy/Huggy/Huggy-1399994.onnx",
                "reward": 6.058708190917969,
                "creation_time": 1677961869.0883765,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399994.pt"
                ]
            },
            {
                "steps": 1599836,
                "file_path": "results/Huggy/Huggy/Huggy-1599836.onnx",
                "reward": 3.506897943255342,
                "creation_time": 1677962103.8778899,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599836.pt"
                ]
            },
            {
                "steps": 1799968,
                "file_path": "results/Huggy/Huggy/Huggy-1799968.onnx",
                "reward": 4.077420898675919,
                "creation_time": 1677962341.7152476,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799968.pt"
                ]
            },
            {
                "steps": 1999993,
                "file_path": "results/Huggy/Huggy/Huggy-1999993.onnx",
                "reward": 3.8863423507789085,
                "creation_time": 1677962576.61981,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999993.pt"
                ]
            },
            {
                "steps": 2000020,
                "file_path": "results/Huggy/Huggy/Huggy-2000020.onnx",
                "reward": 3.816770515839259,
                "creation_time": 1677962576.736703,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000020.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000020,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.816770515839259,
            "creation_time": 1677962576.736703,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000020.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.29.0.dev0",
        "torch_version": "1.8.1+cu102"
    }
}