{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199925,
                "file_path": "results/Huggy/Huggy/Huggy-199925.onnx",
                "reward": 3.562674533794908,
                "creation_time": 1671420671.8133132,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199925.pt"
                ]
            },
            {
                "steps": 399895,
                "file_path": "results/Huggy/Huggy/Huggy-399895.onnx",
                "reward": 4.159920680878767,
                "creation_time": 1671420898.1495798,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399895.pt"
                ]
            },
            {
                "steps": 599984,
                "file_path": "results/Huggy/Huggy/Huggy-599984.onnx",
                "reward": 4.322752842903137,
                "creation_time": 1671421127.2427707,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599984.pt"
                ]
            },
            {
                "steps": 799924,
                "file_path": "results/Huggy/Huggy/Huggy-799924.onnx",
                "reward": 3.7623324744509294,
                "creation_time": 1671421353.0559068,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799924.pt"
                ]
            },
            {
                "steps": 999864,
                "file_path": "results/Huggy/Huggy/Huggy-999864.onnx",
                "reward": 4.09033789302482,
                "creation_time": 1671421576.0706713,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999864.pt"
                ]
            },
            {
                "steps": 1199956,
                "file_path": "results/Huggy/Huggy/Huggy-1199956.onnx",
                "reward": 3.490741851074355,
                "creation_time": 1671421808.0399835,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199956.pt"
                ]
            },
            {
                "steps": 1399939,
                "file_path": "results/Huggy/Huggy/Huggy-1399939.onnx",
                "reward": 3.944641576093786,
                "creation_time": 1671422035.0863082,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399939.pt"
                ]
            },
            {
                "steps": 1599957,
                "file_path": "results/Huggy/Huggy/Huggy-1599957.onnx",
                "reward": 3.819511943959935,
                "creation_time": 1671422257.9597082,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599957.pt"
                ]
            },
            {
                "steps": 1799928,
                "file_path": "results/Huggy/Huggy/Huggy-1799928.onnx",
                "reward": 4.0643220466326895,
                "creation_time": 1671422491.415892,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799928.pt"
                ]
            },
            {
                "steps": 1999966,
                "file_path": "results/Huggy/Huggy/Huggy-1999966.onnx",
                "reward": 4.116536019025026,
                "creation_time": 1671422722.6407108,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999966.pt"
                ]
            },
            {
                "steps": 2000063,
                "file_path": "results/Huggy/Huggy/Huggy-2000063.onnx",
                "reward": 4.163889158855785,
                "creation_time": 1671422722.7596054,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000063.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000063,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 4.163889158855785,
            "creation_time": 1671422722.7596054,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000063.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.29.0.dev0",
        "torch_version": "1.8.1+cu102"
    }
}