{ "Huggy": { "checkpoints": [ { "steps": 199777, "file_path": "results/Huggy/Huggy/Huggy-199777.onnx", "reward": 3.413066661547101, "creation_time": 1691234542.2263682, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-199777.pt" ] }, { "steps": 399913, "file_path": "results/Huggy/Huggy/Huggy-399913.onnx", "reward": 3.8559256326407194, "creation_time": 1691234778.257073, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-399913.pt" ] }, { "steps": 599937, "file_path": "results/Huggy/Huggy/Huggy-599937.onnx", "reward": 4.646418711718391, "creation_time": 1691235018.338573, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-599937.pt" ] }, { "steps": 799967, "file_path": "results/Huggy/Huggy/Huggy-799967.onnx", "reward": 3.9053766006376684, "creation_time": 1691235259.4388618, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-799967.pt" ] }, { "steps": 999876, "file_path": "results/Huggy/Huggy/Huggy-999876.onnx", "reward": 3.77255156463471, "creation_time": 1691235501.9697113, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-999876.pt" ] }, { "steps": 1199969, "file_path": "results/Huggy/Huggy/Huggy-1199969.onnx", "reward": 3.975007079638444, "creation_time": 1691235748.8265374, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-1199969.pt" ] }, { "steps": 1399897, "file_path": "results/Huggy/Huggy/Huggy-1399897.onnx", "reward": 1.0230327447255452, "creation_time": 1691235992.979168, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-1399897.pt" ] }, { "steps": 1599949, "file_path": "results/Huggy/Huggy/Huggy-1599949.onnx", "reward": 3.749732791769261, "creation_time": 1691236233.8916087, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-1599949.pt" ] }, { "steps": 1799994, "file_path": "results/Huggy/Huggy/Huggy-1799994.onnx", "reward": 4.0582070361013, "creation_time": 1691236475.6523752, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-1799994.pt" ] }, { "steps": 1999959, "file_path": "results/Huggy/Huggy/Huggy-1999959.onnx", "reward": 3.26699303984642, "creation_time": 1691236720.1691952, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-1999959.pt" ] }, { "steps": 2000042, "file_path": "results/Huggy/Huggy/Huggy-2000042.onnx", "reward": 3.290972122331945, "creation_time": 1691236720.2914531, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-2000042.pt" ] } ], "final_checkpoint": { "steps": 2000042, "file_path": "results/Huggy/Huggy.onnx", "reward": 3.290972122331945, "creation_time": 1691236720.2914531, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-2000042.pt" ] } }, "metadata": { "stats_format_version": "0.3.0", "mlagents_version": "0.31.0.dev0", "torch_version": "1.11.0+cu102" } }