{ "Huggy": { "checkpoints": [ { "steps": 199899, "file_path": "results/Huggy2/Huggy/Huggy-199899.onnx", "reward": 3.2083018347620964, "creation_time": 1718353048.565127, "auxillary_file_paths": [ "results/Huggy2/Huggy/Huggy-199899.pt" ] }, { "steps": 399920, "file_path": "results/Huggy2/Huggy/Huggy-399920.onnx", "reward": 3.731573632065679, "creation_time": 1718353494.5588298, "auxillary_file_paths": [ "results/Huggy2/Huggy/Huggy-399920.pt" ] }, { "steps": 599927, "file_path": "results/Huggy2/Huggy/Huggy-599927.onnx", "reward": 4.332828250196245, "creation_time": 1718353951.8737936, "auxillary_file_paths": [ "results/Huggy2/Huggy/Huggy-599927.pt" ] }, { "steps": 799980, "file_path": "results/Huggy2/Huggy/Huggy-799980.onnx", "reward": 3.9172810834924174, "creation_time": 1718354398.7373905, "auxillary_file_paths": [ "results/Huggy2/Huggy/Huggy-799980.pt" ] }, { "steps": 999976, "file_path": "results/Huggy2/Huggy/Huggy-999976.onnx", "reward": 3.9479085298684926, "creation_time": 1718354861.4763503, "auxillary_file_paths": [ "results/Huggy2/Huggy/Huggy-999976.pt" ] }, { "steps": 1199939, "file_path": "results/Huggy2/Huggy/Huggy-1199939.onnx", "reward": 3.8761499733119815, "creation_time": 1718355336.206139, "auxillary_file_paths": [ "results/Huggy2/Huggy/Huggy-1199939.pt" ] }, { "steps": 1399996, "file_path": "results/Huggy2/Huggy/Huggy-1399996.onnx", "reward": 3.8570408821105957, "creation_time": 1718355805.139352, "auxillary_file_paths": [ "results/Huggy2/Huggy/Huggy-1399996.pt" ] }, { "steps": 1599947, "file_path": "results/Huggy2/Huggy/Huggy-1599947.onnx", "reward": 3.741940833801447, "creation_time": 1718356257.3007464, "auxillary_file_paths": [ "results/Huggy2/Huggy/Huggy-1599947.pt" ] }, { "steps": 1799916, "file_path": "results/Huggy2/Huggy/Huggy-1799916.onnx", "reward": 3.9586225196167275, "creation_time": 1718356730.00063, "auxillary_file_paths": [ "results/Huggy2/Huggy/Huggy-1799916.pt" ] }, { "steps": 1999350, "file_path": "results/Huggy2/Huggy/Huggy-1999350.onnx", "reward": 3.9355403643388014, "creation_time": 1718357201.4747474, "auxillary_file_paths": [ "results/Huggy2/Huggy/Huggy-1999350.pt" ] }, { "steps": 2000100, "file_path": "results/Huggy2/Huggy/Huggy-2000100.onnx", "reward": 3.639578766292996, "creation_time": 1718357201.6398437, "auxillary_file_paths": [ "results/Huggy2/Huggy/Huggy-2000100.pt" ] } ], "final_checkpoint": { "steps": 2000100, "file_path": "results/Huggy2/Huggy.onnx", "reward": 3.639578766292996, "creation_time": 1718357201.6398437, "auxillary_file_paths": [ "results/Huggy2/Huggy/Huggy-2000100.pt" ] } }, "metadata": { "stats_format_version": "0.3.0", "mlagents_version": "1.1.0.dev0", "torch_version": "2.3.0+cu121" } }