"""用于将来自HG产生的假设陨石坑和HV验证的算法结合后输出真实的陨石坑标签，并将其反投影至经纬度坐标上
usage:
    python main.py -c /home/a804_cbf/Code/luner_crater/config/reader.yaml -o /disk527/sdb1/a804_cbf/datasets/chang_e -g

"""

import argparse
import os
from typing import Iterable

import cv2
import torch
import tqdm
import yaml

from HG.transfer import instantiate_from_config
from HV import dem_crater_filter, crater_filter
from utils.reader import from_TIFF, from_IMG, to_labels, to_latlons
from utils.util import recursive_to_device


def arg_parser():
    """Parse the command-line options of the crater pipeline.

    Returns:
        argparse.Namespace with ``config_path``, ``output_dir`` and the three
        boolean stage switches (generate / verify / filter).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-c",
        "--config_path",
        type=str,
        default="/home/a804_cbf/Code/luner_crater/config/reader.yaml",
        help="The path of the configuration file in yaml format",
    )
    parser.add_argument(
        "-o",
        "--output_dir",
        type=str,
        default="/disk527/sdb1/a804_cbf/datasets/chang_e",
        help="The path of the output directory",
    )
    # The three pipeline-stage switches only differ in their names, so
    # register them from a table instead of three copy-pasted calls.
    stage_flags = (
        ("-g", "--hypothesis-generate", "Generate hypothesis crater"),
        ("-v", "--hypothesis-verify", "Verify hypothesis crater"),
        ("-f", "--hypothesis-filter", "Filter hypothesis crater"),
    )
    for short_opt, long_opt, help_text in stage_flags:
        parser.add_argument(short_opt, long_opt, action="store_true", help=help_text)
    return parser.parse_args()


def TransferRCNN(
    model_config: dict, data_config: dict, **trainer_kwargs
) -> Iterable[tuple[dict, tuple[dict, dict]]]:
    """Run single-GPU inference with a model and data module built from config.

    Instantiates the model and data module via ``instantiate_from_config``,
    moves the model to CUDA, and lazily yields the prediction output of every
    batch, moved back to CPU.

    Args:
        model_config: config dict consumed by ``instantiate_from_config``.
        data_config: config dict for the data module (must support
            ``setup("predict")`` and ``predict_dataloader()``).
        **trainer_kwargs: accepted for call-site compatibility; currently unused.

    Yields:
        The CPU-resident output of ``model.predict_step`` for each batch.
    """
    ## only for single GPU inference ##
    assert len(os.environ["CUDA_VISIBLE_DEVICES"].split(",")) == 1
    ## model ##
    model = instantiate_from_config(model_config)
    model.eval()
    model.to("cuda")
    ## prepare data ##
    data_module = instantiate_from_config(data_config)
    data_module.setup("predict")
    ## predict ##
    # Build the dataloader once; the original called predict_dataloader()
    # a second time just to obtain its length, constructing a new loader.
    dataloader = data_module.predict_dataloader()
    # Pure inference: disable autograd so no graphs are built and GPU memory
    # is not wasted on intermediate activations.
    with torch.no_grad():
        for batch_idx, batch in tqdm.tqdm(
            enumerate(dataloader),
            desc="Predicting",
            total=len(dataloader),
        ):
            batch = recursive_to_device(batch, "cuda")
            output = model.predict_step(batch, batch_idx)
            yield recursive_to_device(output, "cpu")


def hypothesis_generate(output_dir, config):
    """Run the detector and dump per-image predictions and bounding boxes.

    For every image yielded by :func:`TransferRCNN` this writes
    ``<output_dir>/predicts/<name>.png`` (detection visualization) and
    ``<output_dir>/boxes/<name>.txt`` (CSV header plus one
    ``x1,y1,x2,y2,conf`` row per detected box).

    Args:
        output_dir: root directory that receives the ``boxes`` and
            ``predicts`` subdirectories (created if missing).
        config: detector section of the YAML config with ``model``, ``data``
            and ``trainer`` entries.
    """
    # exist_ok=True avoids the check-then-create race of an explicit
    # os.path.exists() test.
    os.makedirs(os.path.join(output_dir, "boxes"), exist_ok=True)
    os.makedirs(os.path.join(output_dir, "predicts"), exist_ok=True)
    for log, (batch, prediction) in TransferRCNN(
        config["model"], config["data"], **config["trainer"]
    ):
        for detect, name, pred in zip(log["predict/detect"], batch["name"], prediction):
            cv2.imwrite(
                os.path.join(output_dir, "predicts", name + ".png"),
                detect.cpu().numpy(),
            )
            with open(os.path.join(output_dir, "boxes", name + ".txt"), "w") as f:
                f.write("x1,y1,x2,y2,conf\n")
                for bbox, score in zip(pred["boxes"].tolist(), pred["scores"].tolist()):
                    f.write(
                        f"{bbox[0]:.4f},{bbox[1]:.4f},{bbox[2]:.4f},{bbox[3]:.4f},{score:.4f}\n"
                    )


def hypothesis_verify(nac_config, dem_config, subfig_config):
    """Verify hypothesis craters against the DEM and build the crater catalog.

    Reads every box file under ``<root_dir>/boxes``, converts the boxes to
    lat/lon, keeps only craters accepted by ``dem_crater_filter``, appends
    each survivor as a ``lat,lon,r`` line to ``<root_dir>/crater_catalog.txt``
    and draws it on an overview image written to ``nac.png``.

    Args:
        nac_config: config passed to ``from_IMG`` for the NAC projection.
        dem_config: kwargs for ``from_TIFF`` for the DEM projection.
        subfig_config: shared kwargs; must contain ``root_dir``.
    """
    dem_prj = from_TIFF(**dem_config, **subfig_config)
    nac_prj = from_IMG(nac_config, **subfig_config)
    root_dir = subfig_config["root_dir"]
    # Min-max normalize the NAC image to [0, 255]. Guard the constant-image
    # case, which previously divided by zero.
    img = nac_prj.image
    lo, hi = img.min(), img.max()
    span = (hi - lo) or 1
    # Cast to 8-bit: cv2.cvtColor/cv2.imwrite expect uint8 here, while the
    # normalized array is floating point (presumably float64 — TODO confirm
    # the dtype of nac_prj.image).
    nac = ((img - lo) / span * 255).astype("uint8")
    nac = cv2.cvtColor(nac, cv2.COLOR_GRAY2BGR)
    with open(os.path.join(root_dir, "crater_catalog.txt"), "w") as f:
        files = os.listdir(os.path.join(root_dir, "boxes"))
        for file in tqdm.tqdm(map(lambda x: x.split(".")[0], files), total=len(files)):
            points_path = os.path.join(root_dir, "boxes", file + ".txt")
            catalog_iter = dem_crater_filter(
                nac_prj, dem_prj, to_latlons(nac_prj, dem_prj, points_path)
            )
            for lat, lon, r in catalog_iter:
                x, y = nac_prj.forward(lat, lon)
                cv2.circle(nac, (round(x), round(y)), round(r), (0, 0, 255), 2)
                print(f"{lat},{lon},{r}", file=f, flush=True)
    cv2.imwrite("nac.png", nac)


def hypothesis_filter(nac_config, dem_config, subfig_config):
    """Re-project the crater catalog to pixel space, filter it, and save it.

    Loads ``<root_dir>/crater_catalog.txt`` (``lat,lon,r`` lines), converts
    each crater center to NAC pixel coordinates, runs ``crater_filter`` over
    them and writes the survivors to ``<root_dir>/crater_catalog_filtered.txt``.

    Args:
        nac_config: config passed to ``from_IMG`` for the NAC projection.
        dem_config: kwargs for ``from_TIFF`` for the DEM projection.
        subfig_config: shared kwargs; must contain ``root_dir``.
    """
    dem_prj = from_TIFF(**dem_config, **subfig_config)
    nac_prj = from_IMG(nac_config, **subfig_config)
    root_dir = subfig_config["root_dir"]
    catalog_path = os.path.join(root_dir, "crater_catalog.txt")
    craters = []
    with open(catalog_path, "r") as src:
        for row in src:
            lat, lon, r = (float(v) for v in row.split(","))
            x, y = nac_prj.forward(lat, lon)
            craters.append((x, y, r))
    # One clustering pass: merge craters whose centers are closer than a
    # given threshold.
    filtered = crater_filter(craters, dem_prj, nac_prj, depth_th=0.2, fluctuation_th=1)
    with open(os.path.join(root_dir, "crater_catalog_filtered.txt"), "w") as dst:
        for lat, lon, r in filtered:
            print(f"{lat},{lon},{r}", file=dst, flush=True)

if __name__ == "__main__":
    args = arg_parser()
    with open(args.config_path, "r") as f:
        config = yaml.load(f, Loader=yaml.FullLoader)
    if args.hypothesis_generate:
        hypothesis_generate(args.output_dir, config["detector"])
    if args.hypothesis_verify:
        nac_config = config["nac"]
        dem_config = config["dem"]
        subfig_config = config["subfig"]
        hypothesis_verify(nac_config, dem_config, subfig_config)
    if args.hypothesis_filter:
        nac_config = config["nac"]
        dem_config = config["dem"]
        subfig_config = config["subfig"]
        hypothesis_filter(nac_config, dem_config, subfig_config)
