{ "Huggy": { "checkpoints": [ { "steps": 499849, "file_path": "results\\Huggy_1\\Huggy\\Huggy-499849.onnx", "reward": 3.8409776226166756, "creation_time": 1650462766.9563007, "auxillary_file_paths": [ "results\\Huggy_1\\Huggy\\Huggy-499849.pt" ] }, { "steps": 999633, "file_path": "results\\Huggy_1\\Huggy\\Huggy-999633.onnx", "reward": 3.8536432950924606, "creation_time": 1650463501.1990354, "auxillary_file_paths": [ "results\\Huggy_1\\Huggy\\Huggy-999633.pt" ] }, { "steps": 1499331, "file_path": "results\\Huggy_1\\Huggy\\Huggy-1499331.onnx", "reward": 3.8168014070605705, "creation_time": 1650464251.328924, "auxillary_file_paths": [ "results\\Huggy_1\\Huggy\\Huggy-1499331.pt" ] }, { "steps": 1999977, "file_path": "results\\Huggy_1\\Huggy\\Huggy-1999977.onnx", "reward": 3.819167050448331, "creation_time": 1650464973.5974445, "auxillary_file_paths": [ "results\\Huggy_1\\Huggy\\Huggy-1999977.pt" ] }, { "steps": 2000001, "file_path": "results\\Huggy_1\\Huggy\\Huggy-2000001.onnx", "reward": 3.7095100827839063, "creation_time": 1650464973.7289164, "auxillary_file_paths": [ "results\\Huggy_1\\Huggy\\Huggy-2000001.pt" ] } ], "final_checkpoint": { "steps": 2000001, "file_path": "results\\Huggy_1\\Huggy.onnx", "reward": 3.7095100827839063, "creation_time": 1650464973.7289164, "auxillary_file_paths": [ "results\\Huggy_1\\Huggy\\Huggy-2000001.pt" ] } }, "metadata": { "stats_format_version": "0.3.0", "mlagents_version": "0.29.0.dev0", "torch_version": "1.7.1+cu110" } }