import argparse
import os
import json
from pathlib import Path
import random
import pickle

from tqdm import tqdm
import numpy as np


def assert_bbox(xmin, ymin, xmax, ymax, image_size):
    """Normalize a bounding box: order the corners and clamp to the image.

    Args:
        xmin, ymin, xmax, ymax: corner coordinates, in any order.
        image_size: (width, height) of the image the box belongs to.

    Returns:
        (xmin, ymin, xmax, ymax) with xmin < xmax, ymin < ymax, and all
        values clamped to [0, width] x [0, height].

    Raises:
        ValueError: if the clamped box is degenerate (zero width or
            height, e.g. the original box lay entirely outside the image).
    """
    # Re-order corners so (xmin, ymin) is the top-left one.
    if xmin > xmax:
        xmin, xmax = xmax, xmin
    if ymin > ymax:
        ymin, ymax = ymax, ymin

    # Clamp into the image. After this, 0 <= min <= max <= size holds by
    # construction, so no extra range checks are needed.
    width, height = image_size[0], image_size[1]
    xmin = min(max(0, xmin), width)
    xmax = min(max(0, xmax), width)
    ymin = min(max(0, ymin), height)
    ymax = min(max(0, ymax), height)

    # Explicit raise instead of `assert`: asserts are stripped under -O,
    # and a degenerate box must never pass through silently.
    if xmin >= xmax:
        raise ValueError(f"degenerate bbox width: {xmin}, {xmax}")
    if ymin >= ymax:
        raise ValueError(f"degenerate bbox height: {ymin}, {ymax}")
    return xmin, ymin, xmax, ymax


def read_json_label(label_file, image_dir, image_size, size_count):
    """Read one labelme-style JSON annotation file into a sample dict.

    The sample's image is the ``.png`` in *image_dir* that shares the JSON
    file's stem. Rectangle shapes are normalized/clamped via
    ``assert_bbox``; boxes no larger than 2 px in either dimension are
    dropped (and printed for inspection). Kept box widths and heights are
    appended to *size_count*, which is mutated in place.

    Args:
        label_file: path to the ``.json`` annotation file.
        image_dir: directory (``Path``) holding the matching ``.png``.
        image_size: (width, height) used to clamp the boxes.
        size_count: dict with ``"W"`` and ``"H"`` lists, accumulated
            across calls.

    Returns:
        ``{"filename": ..., "targets": [[xmin, ymin, xmax, ymax, 0, 0], ...]}``
        or ``None`` when the image is missing or no valid box remains.
    """
    label_file = Path(label_file)
    image_path = image_dir / (label_file.stem + ".png")
    # Guard clause: skip the JSON parse entirely when the image is absent.
    if not image_path.is_file():
        return None

    with label_file.open("r", encoding="utf-8") as f:
        label_info = json.load(f)

    targets = []
    for target in label_info["shapes"]:
        if target["shape_type"] != "rectangle":
            continue
        xmin, ymin = target["points"][0]
        xmax, ymax = target["points"][1]
        xmin, ymin, xmax, ymax = assert_bbox(xmin, ymin, xmax, ymax, image_size)
        if xmax - xmin > 2 and ymax - ymin > 2:
            targets.append([xmin, ymin, xmax, ymax, 0, 0])
            size_count["W"].append(xmax - xmin)
            size_count["H"].append(ymax - ymin)
        else:
            # Too small to train on — log and drop.
            print(xmin, ymin, xmax, ymax)

    if not targets:
        print(label_file)
        return None
    return {"filename": str(image_path), "targets": targets}


def sample_rename(json_file, image_dir, gcount: int):
    """Rename a label file and its paired image to a global counter.

    The JSON at *json_file* becomes ``<gcount>.json`` in its own
    directory, and the same-stem ``.png`` under *image_dir* becomes
    ``<gcount>.png``.

    Returns:
        The new JSON file path (str).
    """
    json_dir, file_name = os.path.split(json_file)
    renamed_json = os.path.join(json_dir, f"{gcount}.json")
    os.rename(json_file, renamed_json)

    old_image = os.path.join(image_dir, file_name.replace(".json", ".png"))
    new_image = os.path.join(image_dir, f"{gcount}.png")
    os.rename(old_image, new_image)
    return renamed_json


def split_train_val_set(labels, train_percent=0.9):
    """Shuffle *labels* in place, then slice into (train, val) lists.

    NOTE: mutates the caller's list via ``random.shuffle``.

    Args:
        labels: list of sample dicts to split.
        train_percent: fraction of samples that go to the train list.

    Returns:
        (train_list, val_list) tuple of list slices.
    """
    random.shuffle(labels)
    cut = int(len(labels) * train_percent)
    train_list, val_list = labels[:cut], labels[cut:]
    return train_list, val_list


def main(data_root):
    """Collect positive/negative samples and pickle full/train/val lists.

    Expected layout under *data_root*:
        pos/<split>/images/*.png  with matching  pos/<split>/labels/*.json
        neg/<split>/*.png         (background images, empty target list)

    Writes ``<task>_samples.pickle``, ``<task>_samples_train.pickle`` and
    ``<task>_samples_val.pickle`` into ``./resources/data`` and prints
    histograms of the collected box widths/heights.
    """
    task_name = "ae"
    image_size = (3840, 2160)  # assumes all images are 4K — TODO confirm
    output_dir = Path("./resources/data")

    labels = []
    size_count = {"H": [], "W": []}  # box sizes, filled by read_json_label
    global_count = 0  # running sample id, used by the optional rename step

    # Positive samples: every labelme JSON whose matching image exists.
    for split in os.listdir(os.path.join(data_root, "pos")):
        image_file_dir = Path(data_root) / "pos" / split / "images"
        label_file_dir = Path(data_root) / "pos" / split / "labels"
        for x in tqdm(label_file_dir.glob("*.json"), desc=split):
            # x = sample_rename(x, image_file_dir, global_count)
            global_count += 1
            if (label := read_json_label(x, image_file_dir, image_size, size_count)) is not None:
                labels.append(label)

    # Negative samples: plain images recorded with an empty target list.
    for split in os.listdir(os.path.join(data_root, "neg")):
        image_file_dir = Path(data_root) / "neg" / split
        for x in tqdm(image_file_dir.glob("*.png"), desc=split):
            labels.append({"filename": str(x), "targets": []})

    print("Size Hist:")
    print(np.histogram(np.array(size_count["W"])))
    print(np.histogram(np.array(size_count["H"])))

    output_dir.mkdir(parents=True, exist_ok=True)

    train_list, val_list = split_train_val_set(labels, 0.95)
    print(f"Train: {len(train_list)}")
    print(f"Val: {len(val_list)}")

    with (output_dir / f"{task_name}_samples.pickle").open(mode="wb") as f:
        pickle.dump(labels, f)
    with (output_dir / f"{task_name}_samples_train.pickle").open(mode="wb") as f:
        pickle.dump(train_list, f)
    with (output_dir / f"{task_name}_samples_val.pickle").open(mode="wb") as f:
        pickle.dump(val_list, f)


if __name__ == "__main__":
    # CLI entry point: only --data_root is configurable for now.
    cli = argparse.ArgumentParser()
    cli.add_argument("--data_root", type=str)
    cli_args = cli.parse_args()
    main(cli_args.data_root)
