{ "Huggy": { "checkpoints": [ { "steps": 10905, "file_path": "results/Huggy2/Huggy/Huggy-10905.onnx", "reward": 1.563771633638276, "creation_time": 1719692786.4835095, "auxillary_file_paths": [ "results/Huggy2/Huggy/Huggy-10905.pt" ] } ], "final_checkpoint": { "steps": 10905, "file_path": "results/Huggy2/Huggy.onnx", "reward": 1.563771633638276, "creation_time": 1719692786.4835095, "auxillary_file_paths": [ "results/Huggy2/Huggy/Huggy-10905.pt" ] } }, "metadata": { "stats_format_version": "0.3.0", "mlagents_version": "1.1.0.dev0", "torch_version": "2.3.0+cu121" } }