{ "Huggy": { "checkpoints": [ { "steps": 199787, "file_path": "results/Huggy/Huggy/Huggy-199787.onnx", "reward": 3.2971875343252632, "creation_time": 1687009675.5652838, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-199787.pt" ] }, { "steps": 399966, "file_path": "results/Huggy/Huggy/Huggy-399966.onnx", "reward": 4.143973943639974, "creation_time": 1687009929.3803203, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-399966.pt" ] }, { "steps": 599930, "file_path": "results/Huggy/Huggy/Huggy-599930.onnx", "reward": 3.370088890194893, "creation_time": 1687010181.661892, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-599930.pt" ] }, { "steps": 799684, "file_path": "results/Huggy/Huggy/Huggy-799684.onnx", "reward": 3.8825273882032065, "creation_time": 1687010433.1539822, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-799684.pt" ] }, { "steps": 999998, "file_path": "results/Huggy/Huggy/Huggy-999998.onnx", "reward": 3.8316523613114106, "creation_time": 1687010687.9704492, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-999998.pt" ] }, { "steps": 1199867, "file_path": "results/Huggy/Huggy/Huggy-1199867.onnx", "reward": 3.5776329308748247, "creation_time": 1687010938.4624119, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-1199867.pt" ] }, { "steps": 1399952, "file_path": "results/Huggy/Huggy/Huggy-1399952.onnx", "reward": 4.511016573224749, "creation_time": 1687011187.3272617, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-1399952.pt" ] }, { "steps": 1599955, "file_path": "results/Huggy/Huggy/Huggy-1599955.onnx", "reward": 3.6064964499177234, "creation_time": 1687011439.8558347, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-1599955.pt" ] }, { "steps": 1799786, "file_path": "results/Huggy/Huggy/Huggy-1799786.onnx", "reward": 4.011360625425975, "creation_time": 1687011698.6505194, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-1799786.pt" ] }, { "steps": 1999910, "file_path": "results/Huggy/Huggy/Huggy-1999910.onnx", "reward": 3.621603718170753, "creation_time": 1687011960.9768367, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-1999910.pt" ] }, { "steps": 2000660, "file_path": "results/Huggy/Huggy/Huggy-2000660.onnx", "reward": 3.484933817161704, "creation_time": 1687011961.1248944, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-2000660.pt" ] } ], "final_checkpoint": { "steps": 2000660, "file_path": "results/Huggy/Huggy.onnx", "reward": 3.484933817161704, "creation_time": 1687011961.1248944, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-2000660.pt" ] } }, "metadata": { "stats_format_version": "0.3.0", "mlagents_version": "0.31.0.dev0", "torch_version": "1.11.0+cu102" } }