import os
from pathlib import Path
from functools import partial

import numpy as np

from tools.json_helper import json_helper
from tools.dataset_helper import open_img_np, plot_cmd
from tools.select_patch import crn_cmd


def open_os(
    src_file, *, scale=0, **kwargs
):
    """
    Open an OS-dataset image as a numpy ndarray.

    Param
    -----
    scale : rescale to the range [0, scale] after normalisation;
            keep the original range when scale == 0

    Note
    -----
    plt.imread reads a png as float in [0, 1], otherwise int
    """
    # Forwarded to open_img_np as max_; presumably the normalisation
    # reference — all zeros when no rescaling is requested, all ones
    # otherwise (confirm against open_img_np).
    max_ = np.zeros(3) if scale == 0 else np.ones(3)

    return open_img_np(src_file, max_=max_, scale=scale, **kwargs)


def get_os_modal(filename):
    """Return the 3-letter modal prefix of *filename* ("opt" or "sar")."""
    return Path(filename).name[:3]


def get_os_filelist(dset_dir, *, relpath=True, save_dir=""):
    """
    Write JSON file lists for the whole OS dataset: one JSON per
    <size>/<scene> folder plus one aggregated JSON per <size> folder.

    Param
    -----
    dset_dir : dataset root, laid out as <size>/<scene>/<modal><id>.png
    relpath  : store paths relative to dset_dir instead of the raw path
    save_dir : directory the JSON files are written into
    """
    root = Path(dset_dir)

    def _fmt(p):
        # Path -> str, relative to the dataset root when requested.
        return os.path.relpath(p, root) if relpath else str(p)

    for size in root.iterdir():
        if not size.is_dir():
            continue

        merged = {'OPT': [], 'SAR': []}
        for folder in size.iterdir():
            if not folder.is_dir():
                continue

            cur = {'OPT': [], 'SAR': []}
            for filename in folder.iterdir():
                if not filename.is_file():
                    continue

                # compute the modal prefix once per file
                modal = get_os_modal(filename)
                if modal == "opt":
                    cur["OPT"].append(_fmt(filename))
                elif modal == "sar":
                    cur["SAR"].append(_fmt(filename))

            jh = json_helper(dict_=cur)
            jh.to_json(save_dir=os.path.join(
                save_dir, f"{size.name}_{folder.name}.json"))

            for k in merged:
                merged[k].extend(cur[k])

        jh = json_helper(dict_=merged)
        jh.to_json(save_dir=os.path.join(save_dir, f"{size.name}.json"))


def fn2wkt_os(path: str) -> str:
    """
    Convert an image path into its wkt identifier string.

    The path is expected to look like .../<size>/<folder>/<modal><id>.png;
    the result is str((size, folder, id)), e.g. "('256', 'test', '3')".

    Param
    -----
    path : the identifier of a sample (its image file path)
    """
    p = Path(path)
    sample_id = p.stem[3:]          # drop the 3-char modal prefix
    folder = p.parent.name
    size = p.parent.parent.name

    return str((size, folder, sample_id))


def wkt2fn_os(wkt: str, modal:str) -> str:
    """
    Inverse of fn2wkt_os: rebuild the relative file path from a wkt string.

    Param
    -----
    wkt   : identifier produced by fn2wkt_os, i.e. str((size, folder, id))
    modal : modal prefix to prepend to the file name, e.g. "opt" or "sar"
    """
    pieces = [part.strip("' ") for part in wkt.strip("()").split(",")]
    size, folder, sample_id = pieces[0], pieces[1], pieces[2]

    return os.path.join(size, folder, modal + sample_id + ".png")


if __name__ == "__main__1":
    # Disabled demo ("__main__1" never equals __name__): round-trip a
    # sample path through fn2wkt_os / wkt2fn_os and print both results.
    path = "E:/datasets/OSdataset/256/test/opt3.png"
    wkt = fn2wkt_os(path)
    print(wkt)
    fn = wkt2fn_os(wkt, modal="opt")
    print(fn)


# patch match samples of val
if __name__ == "__main__1":
    # Disabled script ("__main__1" never matches): build a JSON of
    # positive and negative OPT/SAR patch pairs for the test split.

    import json
    import random

    def read_val_json(path):
        """Read a {"OPT": [...], "SAR": [...]} JSON into a list of [opt, sar] pairs."""
        with open(path, 'r') as fp:
            files = json.load(fp)

        pairs = []
        for o, s in zip(files["OPT"], files["SAR"]):
            cur = [o, s]
            pairs.append(cur)

        return pairs

    def get_neg_pairs(num_diff, pair):
        """
        Build negative pairs: each SAR patch is matched with a random
        optical patch drawn from a reservoir of the `num_diff` most
        recent *earlier* optical patches (the current sample's optical
        patch is pushed only after the draw, so a negative never uses
        the matching optical image).
        """
        pairs = []
        size = num_diff
        reservoir = []

        for p in pair:

            o, s = p[0], p[1]
            if len(reservoir) >= size:
                # reservoir is full: remove one random earlier optical patch
                random.shuffle(reservoir)
                neg_o = reservoir[-1]
                reservoir = reservoir[:-1]
                cur = [neg_o, s]
                pairs.append(cur)

            reservoir.append(o)

        return pairs

    path = "E:/workspace/SOMatch/json/os-select/1-s-test.json"
    pos_pair = read_val_json(path)
    neg_pair = get_neg_pairs(50, pos_pair)

    # split the pair lists into parallel OPT / SAR columns
    pos_opt = list(map(lambda x : x[0], pos_pair))
    pos_sar = list(map(lambda x : x[1], pos_pair))
    neg_opt = list(map(lambda x : x[0], neg_pair))
    neg_sar = list(map(lambda x : x[1], neg_pair))

    opt_list = pos_opt + neg_opt
    sar_list = pos_sar + neg_sar
    labels = [1] * len(pos_opt) + [0] * len(neg_opt)  # 1 = match, 0 = negative

    content = {
        "OPT" : opt_list, 
        "SAR" : sar_list, 
        "LABEL" : labels, 
    }

    with open("E:/workspace/SOMatch/json/os-select/1-s-test-pair.json", "w") as fp:
        json.dump(content, fp, indent="\t", sort_keys=True)


# patch match sample with 1/2 overlap
if __name__ == "__main__1":
    # Disabled script ("__main__1" never matches): slice each 512x512
    # OPT/SAR image pair into 256x256 patches on a 3x3 grid with a
    # 128 px stride (half-overlap).  Identical or half-overlapping
    # positions form positive pairs; negatives pair a SAR patch with a
    # random earlier optical patch from a reservoir.  Patch images and
    # a pair JSON are written to disk.
    import random
    import json
    import cv2 as cv

    def postive_coord():
        """
        Enumerate positive patch-position pairs on a 3x3 grid.

        Returns (same, overlap):
        same    : (9, 2, 2) int16 - pairs of identical grid positions
                  (Manhattan distance 0)
        overlap : (N, 2, 2) int16 - pairs of adjacent grid positions
                  (Manhattan distance 1, i.e. half-overlapping patches)
        Each entry is [[x1, y1], [x2, y2]] in grid units (0..2).
        """
        # NOTE(review): "postive" is a typo for "positive"; kept as-is
        # because the call site below uses this name.
        row_ids = np.array(list(range(3)))
        col_ids = np.array(list(range(3)))
        coord = np.meshgrid(row_ids, col_ids)
        coord_ids = np.stack((coord[1], coord[0]), axis=2)
        coord_ids = coord_ids.reshape(-1, 2)  # all 9 grid positions

        # pairwise Manhattan distance between every two grid positions
        ids1 = coord_ids
        ids2 = np.copy(coord_ids)
        ids1 = np.expand_dims(ids1, axis=1) 
        ids2 = np.expand_dims(ids2, axis=0) 
        dist = np.sum(np.abs(ids1 - ids2), axis=2)
        # broadcast both coordinate sets to (9, 9, 2) so each (i, j)
        # cell carries the two positions of that pair
        zero = np.expand_dims(np.zeros_like(dist), axis=2)
        ids1 = ids1 + zero
        ids2 = ids2 + zero
        coordnates = np.stack((ids1, ids2), axis=2)
        
        same = coordnates[dist==0]
        overlap = coordnates[dist==1]
        same = np.array(same, dtype=np.int16)
        overlap = np.array(overlap, dtype=np.int16)

        return same, overlap

    def coord_generator(max_iter=5):
        """
        Yield `max_iter` batches of 4 positive coordinate pairs each.

        Per batch: with probability 1/2 one "same position" pair is
        included; the remaining pairs are sampled without replacement
        from the half-overlap pairs.
        """
        same, overlap = postive_coord()
        same_l, overlap_l = same.shape[0], overlap.shape[0]
        same_l = list(range(same_l))        # index pools to sample from
        overlap_l = list(range(overlap_l))
        samples = 4  # pairs produced per iteration

        for i in range(max_iter):
            cur_sample = 0
            prob = random.randint(0, 1)  # coin flip: include a "same" pair?

            s_coord = np.empty((0, 2, 2))
            if prob == 1:
                s_ids = random.sample(same_l, 1)
                s_coord = same[s_ids]
                cur_sample += 1
            
            o_ids = random.sample(overlap_l, samples-cur_sample)
            o_coord = overlap[o_ids]
            # print(o_coord.shape)
            # print(s_coord.shape)
            ids = np.concatenate((s_coord, o_coord), axis=0)

            yield ids
        
        return None

    def read_merged_json(*args):
        """Merge one or more {"OPT": [...], "SAR": [...]} JSON files into [opt, sar] pairs."""
        pairs = []

        for path in args:
            with open(path, 'r') as fp:
                files = json.load(fp)

            for o, s in zip(files["OPT"], files["SAR"]):
                cur = [o, s]
                pairs.append(cur)

        return pairs

    class reservoir:
        """
        Bounded pool of recent samples: push evicts the oldest entry
        once `capacity` is exceeded; pop returns a random entry only
        after the pool has filled up (None before that).
        """

        def __init__(self, capacity=50):
            self.pool = []        # stored samples, oldest first
            self.size = 0         # current number of samples
            self.capacity = capacity

        def push(self, sample):
            """Append a sample, dropping the oldest one when over capacity."""
            self.pool.append(sample)
            self.size += 1

            if self.size > self.capacity:
                self.pool = self.pool[1:]
                self.size -= 1

        def pop(self):
            """Remove and return a random sample, or None while the pool is not yet full."""
            if self.size < self.capacity:
                return None

            random.shuffle(self.pool)
            ret = self.pool[0]
            self.pool = self.pool[1:]
            self.size -= 1
            
            return ret

    class pair_dset:
        """Accumulates positive / negative OPT-SAR file-name pairs and saves them as JSON."""
        
        def __init__(self, save_folder):
            self.pos_opt = []
            self.pos_sar = []
            self.neg_opt = []
            self.neg_sar = []
            self.folder = save_folder  # directory the JSON is written into

        def add(self, x, kw="pos"):
            """Add a pair x = {"OPT": name, "SAR": name}; kw selects the pos/neg lists."""
            if kw == "pos":
                self.pos_opt.append(x["OPT"])
                self.pos_sar.append(x["SAR"])
            elif kw == "neg":
                self.neg_opt.append(x["OPT"])
                self.neg_sar.append(x["SAR"])

        def save(self, name="pair_dset.json"):
            """Dump all pairs with LABEL 1 (positive) / 0 (negative) to <folder>/<name>."""
            opt_list = self.pos_opt + self.neg_opt
            sar_list = self.pos_sar + self.neg_sar
            labels = [1] * len(self.pos_opt) + [0] * len(self.neg_opt)

            content = {
                "OPT" : opt_list, 
                "SAR" : sar_list, 
                "LABEL" : labels, 
            }

            with open(os.path.join(self.folder, name), "w") as fp:
                json.dump(content, fp, indent="\t", sort_keys=True)

    def read_img(*path, org_folder=""):
        """
        Read images (paths taken relative to org_folder) and average the
        channels, returning a tuple of (h, w, 1) arrays.
        """
        ret = []

        for p in path:
            img = cv.imread(os.path.join(org_folder, p))

            # NOTE(review): cv.imread with default flags yields a 3-channel
            # array (or None on failure); confirm this 2-D branch is reachable.
            if len(img.shape) == 2:
                img = np.expand_dims(img, axis=2)

            h, w = img.shape[0], img.shape[1]
            img = img.mean(axis=2, keepdims=True).reshape(h, w, -1)
            ret.append(img)

        return tuple(ret)

    def transform_coord(coords):
        """Grid units -> pixel offsets: one grid step is 128 px (half of a 256 px patch)."""
        ret = np.array(128*coords, dtype=np.int16)
        return ret

    def slice_img(pair, coord):
        """
        Cut 256x256 patches from an (opt, sar) image pair.

        coord : iterable of [[opt_x, opt_y], [sar_x, sar_y]] pixel offsets
        Returns a tuple of (opt_patch, sar_patch) pairs.
        """
        opt_patch, sar_patch = pair
        ret = []

        for c in coord:
            opt_c, sar_c = c[0], c[1]
            opt_end, sar_end = opt_c+256, sar_c+256

            # coords are (x, y): index rows with [1], columns with [0]
            opt_i = opt_patch[opt_c[1]:opt_end[1], opt_c[0]:opt_end[0]]
            sar_i = sar_patch[sar_c[1]:sar_end[1], sar_c[0]:sar_end[0]]

            ret.append((opt_i, sar_i))

        return tuple(ret)

    def generate_overlap_dset(*path, org_folder="",
        img_folder="", json_folder=""
    ):
        """
        Full pipeline: for every OPT/SAR image pair listed in the JSONs,
        slice positive patch pairs, pair each SAR patch with a random
        earlier optical patch as a negative, write patch images to
        img_folder and the pair JSON to json_folder.
        """

        content = read_merged_json(*path)
        length = len(content)
        pool = reservoir()            # recent optical patch names for negatives
        pd_json = pair_dset(json_folder)

        for i, coord in enumerate(coord_generator(length)):
            c = content[i]
            opt_path, sar_path = c[0], c[1]
            opt_patch, sar_patch = read_img(opt_path, sar_path, org_folder=org_folder)

            true_coord = transform_coord(coord)
            pairs = slice_img((opt_patch, sar_patch), true_coord)

            for p, crd in zip(pairs, coord):
                # patch names encode the source index and grid coordinates
                opt_name = f"opt_{i}_c{int(crd[0][0])}_{int(crd[0][1])}.png"
                sar_name = f"sar_{i}_c{int(crd[1][0])}_{int(crd[1][1])}.png"
                pd_json.add({"OPT": opt_name, "SAR": sar_name}, kw="pos")
                opt_i_path = os.path.join(img_folder, opt_name)
                sar_i_path  = os.path.join(img_folder, sar_name)
                cv.imwrite(opt_i_path, p[0])
                cv.imwrite(sar_i_path, p[1])

                # negative: current SAR patch vs a random earlier optical patch
                neg = pool.pop()
                if not neg is None:
                    pd_json.add({"OPT": neg, "SAR": sar_name}, kw="neg")
                pool.push(opt_name)

        pd_json.save()

    # path = (
    #     "E:/workspace/SOMatch/json/os_dataset/512_train.json", 
    #     "E:/workspace/SOMatch/json/os_dataset/512_val.json"
    # )
    path = (
        "E:/workspace/SOMatch/json/os_dataset/512_test.json", 
    )
    generate_overlap_dset(*path, 
        org_folder="E:/datasets/OSdataset",
        img_folder="E:/datasets/OSdataset/256-pos-neg", 
        json_folder="E:/workspace/SOMatch/tmp/json")


# split dset to train and test
if __name__ == "__main__1":
    # Disabled script ("__main__1" never matches): split the merged
    # os-select pair list into train / test JSON files.
    import random
    import json

    def read_merged_json(*args):
        """Merge one or more {"OPT": [...], "SAR": [...]} JSON files into [opt, sar] pairs."""
        pairs = []

        for path in args:
            with open(path, 'r') as fp:
                files = json.load(fp)

            for o, s in zip(files["OPT"], files["SAR"]):
                cur = [o, s]
                pairs.append(cur)

        return pairs

    def split_dset(content, *, val_num=1000):
        """
        Split `content` into train / test: every (len // val_num)-th
        sample goes to the test set, the rest to train.

        NOTE(review): if len(content) < val_num the interval is 0 and
        all val indices collapse to 0 — confirm inputs are always larger.
        """
        length = len(content)
        interval = length // val_num
        val_ids = [interval*i for i in range(val_num)]
        train_ids = []
        
        # NOTE(review): `i in val_ids` is a list scan, O(val_num) per
        # element; a set would make this loop linear.
        for i in range(length):
            if i in val_ids:
                continue

            train_ids.append(i)

        # print(len(train_ids)+len(val_ids)==length)
        # print(set(train_ids)&set(val_ids))

        val_ctt = [content[i] for i in val_ids]
        train_ctt = [content[i] for i in train_ids]

        return {"train" : train_ctt, "test": val_ctt}

    def list2json(content, path):
        """Write a list of [opt, sar] pairs back to the {"OPT", "SAR"} JSON layout."""
        opt = [i[0] for i in content]
        sar = [i[1] for i in content]

        ctt = {"OPT" : opt, "SAR" : sar}

        with open(path, "w") as fp:
            json.dump(ctt, fp, indent="\t", sort_keys=True)

    path = ("E:/workspace/SOMatch/json/os-select/os-select.json", )
    content = read_merged_json(*path)
    dset = split_dset(content)
    
    train_path = "E:/workspace/SOMatch/json/os-select/1-s-train.json"
    test_path = "E:/workspace/SOMatch/json/os-select/1-s-test.json"
    list2json(dset["train"], train_path)
    list2json(dset["test"], test_path)


if __name__ == "__main__":
    # Active script: attach a "MASK" file list to the test-pair JSON.
    # For every SAR path, the mask path is built as
    # "<folder>/<parent-dir-name>-<basename>".
    import json

    json_path = "E:/workspace/SOMatch/json/os-select/1-s-test.json"
    save_path = "E:/workspace/SOMatch/json/os-select/1-s-test-mask.json"
    folder = "mask-pos-025"

    with open(json_path) as fp:
        content = json.load(fp)
        path_list = content["SAR"]
    
    # e.g. "256/test/sar3.png" -> "mask-pos-025/test-sar3.png"
    # (assumes the SAR entries are multi-component paths — confirm)
    fn_func = lambda path : "/".join((folder, os.path.basename(os.path.dirname(path))+"-"+os.path.basename(path)))

    mask_list = [fn_func(i) for i in path_list]
    content["MASK"] = mask_list

    with open(save_path, "w") as fp:
        json.dump(content, fp, indent="\t", sort_keys=True)


# work as cmd 
# "list" : get list file of all patchs in os dataset
if __name__ == "__main__1":
    # Disabled command dispatcher ("__main__1" never matches): reads a
    # program name from stdin and runs the matching tool.

    p = input("select program --> ")

    
    if p == "list":
        # write JSON file lists for every <size>/<scene> folder

        save_dir = "./json/os_dataset"
        dset_dir = "E:/datasets/OSdataset"

        if not os.path.exists(save_dir):
            os.mkdir(save_dir)

        get_os_filelist(dset_dir, save_dir=save_dir)

    if p == "crn":
        # run crn_cmd from tools.select_patch — presumably corner/patch
        # selection over images read via open_os (confirm in that module)

        crn_cmd(
            read_func=partial(open_os, scale=255, mean=True), 
            fn2wkt=fn2wkt_os, wkt2fn=wkt2fn_os, 
            pt_range=[26, 230, 26, 230], 
        )

        # python -m tools.os_helper -d "E:\datasets\OSdataset" -s "E:\workspace\SOMatch\json\os_harris\256_train" -l "E:\workspace\SOMatch\json\os_dataset\256_train.json"   

    if p == "plot":
        # run plot_cmd from tools.dataset_helper — presumably plots the
        # selected points on top of the images (confirm in that module)

        plot_cmd(
            wkt2fn=wkt2fn_os, 
            read_func=partial(open_os, scale=0, mean=True)
        )

        # python -m tools.os_helper -d "E:\datasets\OSdataset" -o "E:\workspace\SOMatch\json\os_harris\256_train\OPT_HARRIS0.05.json" -s "E:\workspace\SOMatch\json\os_harris\256_train\SAR_HARRIS0.05.json" -v "E:\datasets\client-data\os_train_plot"




