{ "Huggy": { "checkpoints": [ { "steps": 1399977, "file_path": "results/Huggy2/Huggy/Huggy-1399977.onnx", "reward": 3.8015785217285156, "creation_time": 1711411091.05452, "auxillary_file_paths": [ "results/Huggy2/Huggy/Huggy-1399977.pt" ] }, { "steps": 1599934, "file_path": "results/Huggy2/Huggy/Huggy-1599934.onnx", "reward": 3.474546171959091, "creation_time": 1711411240.4027383, "auxillary_file_paths": [ "results/Huggy2/Huggy/Huggy-1599934.pt" ] }, { "steps": 1799955, "file_path": "results/Huggy2/Huggy/Huggy-1799955.onnx", "reward": 3.5397074126649177, "creation_time": 1711411391.3280451, "auxillary_file_paths": [ "results/Huggy2/Huggy/Huggy-1799955.pt" ] }, { "steps": 1999986, "file_path": "results/Huggy2/Huggy/Huggy-1999986.onnx", "reward": 3.6814975177540497, "creation_time": 1711411542.6980746, "auxillary_file_paths": [ "results/Huggy2/Huggy/Huggy-1999986.pt" ] }, { "steps": 2000031, "file_path": "results/Huggy2/Huggy/Huggy-2000031.onnx", "reward": 3.679797158922468, "creation_time": 1711411542.8436747, "auxillary_file_paths": [ "results/Huggy2/Huggy/Huggy-2000031.pt" ] } ], "final_checkpoint": { "steps": 2000031, "file_path": "results/Huggy2/Huggy.onnx", "reward": 3.679797158922468, "creation_time": 1711411542.8436747, "auxillary_file_paths": [ "results/Huggy2/Huggy/Huggy-2000031.pt" ] } }, "metadata": { "stats_format_version": "0.3.0", "mlagents_version": "1.1.0.dev0", "torch_version": "2.2.1+cu121" } }