{ "Huggy": { "checkpoints": [ { "steps": 499958, "file_path": "results/Huggy/Huggy/Huggy-499958.onnx", "reward": 3.7460114317280904, "creation_time": 1670349435.765839, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-499958.pt" ] }, { "steps": 999914, "file_path": "results/Huggy/Huggy/Huggy-999914.onnx", "reward": 3.970517189162118, "creation_time": 1670350037.6288755, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-999914.pt" ] }, { "steps": 1499945, "file_path": "results/Huggy/Huggy/Huggy-1499945.onnx", "reward": 3.5814410350607626, "creation_time": 1670350634.5938494, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-1499945.pt" ] }, { "steps": 1999953, "file_path": "results/Huggy/Huggy/Huggy-1999953.onnx", "reward": 3.2274825317519054, "creation_time": 1670351233.3894577, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-1999953.pt" ] }, { "steps": 2000076, "file_path": "results/Huggy/Huggy/Huggy-2000076.onnx", "reward": 3.4525089979171755, "creation_time": 1670351233.5201578, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-2000076.pt" ] } ], "final_checkpoint": { "steps": 2000076, "file_path": "results/Huggy/Huggy.onnx", "reward": 3.4525089979171755, "creation_time": 1670351233.5201578, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-2000076.pt" ] } }, "metadata": { "stats_format_version": "0.3.0", "mlagents_version": "0.29.0.dev0", "torch_version": "1.8.1+cu102" } }