ppo-Huggy/run_logs/training_status.json
{
"Huggy": {
"checkpoints": [
{
"steps": 199854,
"file_path": "results/Huggy/Huggy/Huggy-199854.onnx",
"reward": 3.36027886679298,
"creation_time": 1682576127.3000648,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199854.pt"
]
},
{
"steps": 399943,
"file_path": "results/Huggy/Huggy/Huggy-399943.onnx",
"reward": 3.6921137535855886,
"creation_time": 1682576371.992132,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399943.pt"
]
},
{
"steps": 599990,
"file_path": "results/Huggy/Huggy/Huggy-599990.onnx",
"reward": 3.6552946139604616,
"creation_time": 1682576619.3582492,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599990.pt"
]
},
{
"steps": 799990,
"file_path": "results/Huggy/Huggy/Huggy-799990.onnx",
"reward": 3.776966779896643,
"creation_time": 1682576864.8083029,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799990.pt"
]
},
{
"steps": 999992,
"file_path": "results/Huggy/Huggy/Huggy-999992.onnx",
"reward": 3.9536702826425625,
"creation_time": 1682577115.436071,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999992.pt"
]
},
{
"steps": 1199993,
"file_path": "results/Huggy/Huggy/Huggy-1199993.onnx",
"reward": 3.7206257672997207,
"creation_time": 1682577364.6115227,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199993.pt"
]
},
{
"steps": 1399961,
"file_path": "results/Huggy/Huggy/Huggy-1399961.onnx",
"reward": 3.874755822323464,
"creation_time": 1682577612.1211877,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399961.pt"
]
},
{
"steps": 1599948,
"file_path": "results/Huggy/Huggy/Huggy-1599948.onnx",
"reward": 3.8770756668904247,
"creation_time": 1682577849.3017204,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599948.pt"
]
},
{
"steps": 1799708,
"file_path": "results/Huggy/Huggy/Huggy-1799708.onnx",
"reward": 3.5772060326167514,
"creation_time": 1682578080.1376784,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799708.pt"
]
},
{
"steps": 1999991,
"file_path": "results/Huggy/Huggy/Huggy-1999991.onnx",
"reward": 3.9136832498368763,
"creation_time": 1682578309.6107469,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999991.pt"
]
},
{
"steps": 2000057,
"file_path": "results/Huggy/Huggy/Huggy-2000057.onnx",
"reward": 3.914386497810483,
"creation_time": 1682578309.7269113,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000057.pt"
]
}
],
"final_checkpoint": {
"steps": 2000057,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.914386497810483,
"creation_time": 1682578309.7269113,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000057.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}
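
The file above is the training-status record that ML-Agents writes during a run. As a rough illustration (not part of the original file), the following minimal Python sketch loads it and prints each checkpoint's step count, mean reward, and saved artifacts; the relative path is assumed from the repository layout shown above, and the key names are taken verbatim from the data, including ML-Agents' own "auxillary_file_paths" spelling.

import json

# Minimal sketch: read the training status written by ML-Agents.
# Path assumed relative to the repository root shown above.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

huggy = status["Huggy"]

# Walk the checkpoint list and report steps, reward, and file paths.
for ckpt in huggy["checkpoints"]:
    print(
        f"steps={ckpt['steps']:>8}  "
        f"reward={ckpt['reward']:.3f}  "
        f"onnx={ckpt['file_path']}"
    )
    # Note: ML-Agents spells this key "auxillary_file_paths".
    for aux in ckpt["auxillary_file_paths"]:
        print(f"  aux: {aux}")

final = huggy["final_checkpoint"]
print(f"final: steps={final['steps']}  reward={final['reward']:.3f}")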