import os
import argparse
import pickle
from tqdm import tqdm
import numpy as np
from pprint import pprint
import random
import matplotlib.pyplot as plt
import cv2
from scipy.spatial import cKDTree

from data.dataset import build_dataset
from data.utils import read_image

def parse_args():
    """Parse and return the command-line options for this script.

    Options:
        --datasets   comma-separated dataset names (required)
        -n/--num     number of images to sample (required)
        --threshold  summed |z-score| cutoff for outlier pixels (default 4)
    """
    parser = argparse.ArgumentParser(description='visualize 3d input distribution')
    parser.add_argument('--datasets', type=str, required=True, help='datasets name')
    parser.add_argument('-n', '--num', type=int, required=True, help='number of images')
    parser.add_argument('--threshold', type=float, default=4, help='threshold for outlier detection')
    return parser.parse_args()

class TinyDataset:
    """A random subset of one or more datasets that yields raw images.

    Parameters
    ----------
    dataset_names : iterable of str
        Split names understood by ``build_dataset`` (e.g. ``"coco-train"``).
    max_size : int
        Maximum number of entries to keep.  A random sample without
        replacement is drawn from the combined dataset dicts.
    """

    def __init__(self, dataset_names, max_size):
        self.dataset_dicts = build_dataset(*dataset_names)
        # Clamp the sample size: random.sample raises ValueError when asked
        # for more items than the population holds, so a request larger than
        # the dataset now keeps everything instead of crashing.
        sample_size = min(max_size, len(self.dataset_dicts))
        self.dataset_dicts = random.sample(self.dataset_dicts, sample_size)

    def __len__(self):
        return len(self.dataset_dicts)

    def __getitem__(self, index):
        # Only file names are stored; pixels are read lazily on access.
        filename = self.dataset_dicts[index]['file_name']
        return read_image(filename)

if __name__ == '__main__':
    args = parse_args()
    
    # Map each CLI dataset name to its "<name>-train" split and sample
    # up to --num images from the combined result.
    train_datasets = ["{}-train".format(dataset) for dataset in args.datasets.split(',')]
    dataset = TinyDataset(train_datasets, max_size=args.num)

    """ RGB space """
    # 2x2 figure: 3D RGB scatter (top right) plus the three 2D
    # channel-pair projections R-G, G-B and R-B.
    ax_rgb = plt.subplot(222, projection='3d')
    ax_rg = plt.subplot(221)
    ax_gb = plt.subplot(223)
    ax_rb = plt.subplot(224)
    plt.subplots_adjust(wspace=0.3, hspace=0.3)

    # Flatten every 3-channel image into an (N, 3) pixel array; images whose
    # last axis is not 3 (e.g. grayscale) are skipped.
    # NOTE(review): assumes channel-last layout from read_image -- confirm
    # against data.utils.read_image.
    pixels_list = [image.reshape(-1, image.shape[-1]) for image in tqdm(dataset) if image.shape[-1] == 3]
    pixels = np.concatenate(pixels_list)
    del pixels_list  # free per-image arrays early; `pixels` can be huge
    print("pixels shape:", pixels.shape)

    # Raw pixel cloud in blue; cluster centers are overlaid in red later.
    ax_rgb.scatter(pixels[:,0], pixels[:,1], pixels[:,2], s=0.5, color='b')
    ax_rg.scatter(pixels[:,0], pixels[:,1], s=0.5, color='b')
    ax_gb.scatter(pixels[:,1], pixels[:,2], s=0.5, color='b')
    ax_rb.scatter(pixels[:,0], pixels[:,2], s=0.5, color='b')

    print("clustering")

    # Bucket every pixel into one of 256 classes by its rounded channel mean
    # (i.e. grayscale intensity), then compute a robust per-class center:
    # pixels whose summed per-channel |z-score| exceeds --threshold are
    # treated as outliers and excluded from the class mean.
    pixels_class = np.round(pixels.mean(1)).astype(np.uint8)
    cluster_centers = np.empty((256, 3))
    cluster_done = np.zeros((256,))
    for class_id in range(256):
        members = pixels[pixels_class == class_id]
        if len(members) == 0:
            continue
        mean = members.mean(0)
        std = members.std(0)
        # 1e-7 avoids division by zero for constant channels.
        score = np.abs((members - mean) / (std + 1e-7)).sum(1)
        inliers = members[score <= args.threshold]
        if len(inliers) > 0:
            cluster_centers[class_id] = inliers.mean(0)
            cluster_done[class_id] = 1

    # Pin the extremes to pure black/white so the gap-filling interpolation
    # below always has valid endpoints.
    cluster_centers[0] = [0,0,0]
    cluster_centers[-1] = [255,255,255]
    cluster_done[0] = cluster_done[-1] = 1

    def detect_empty(x):
        """Yield ``(start, end)`` index pairs bracketing runs of zeros in *x*.

        *x* is a 0/1 indicator array; each yielded pair satisfies
        ``x[start] == 1``, ``x[end] == 1`` with only zeros strictly in
        between, so the caller can interpolate across the gap.
        """
        temp = (x[1:] - x[:-1])
        start = None
        # Generalized: iterate over the actual difference length instead of
        # the previously hard-coded 255 (which assumed len(x) == 256).
        for i in range(len(temp)):
            if (temp[i] < 0):
                start = i  # 1 -> 0 transition: a gap opens after index i
            elif (temp[i] > 0) and start is not None:
                # 0 -> 1 transition closes the gap at i + 1.  The
                # `start is not None` guard skips a leading zero run with
                # no left endpoint (impossible when x[0] == 1, but safe).
                yield start, i + 1
    
    # Fill every run of missing cluster centers by linear interpolation
    # between the known centers at the run's two endpoints.
    for start, end in detect_empty(cluster_done):
        lo = cluster_centers[start]
        hi = cluster_centers[end]
        span = end - start
        for step in range(1, span):
            cluster_centers[start + step] = (span - step) / span * lo + step / span * hi

    print("cluster done, drawing")

    # Overlay the 256 cluster centers in red on all four panels.
    ax_rgb.scatter(cluster_centers[:,0], cluster_centers[:,1], cluster_centers[:,2], s=1, color='r')
    ax_rg.scatter(cluster_centers[:,0], cluster_centers[:,1], s=3, color='r')
    ax_gb.scatter(cluster_centers[:,1], cluster_centers[:,2], s=3, color='r')
    ax_rb.scatter(cluster_centers[:,0], cluster_centers[:,2], s=3, color='r')


    ax_rgb.set_xlabel('R')
    ax_rgb.set_ylabel('G')
    ax_rgb.set_zlabel('B')

    ax_rg.set_xlabel('R')
    ax_rg.set_ylabel('G')

    ax_gb.set_xlabel('G')
    ax_gb.set_ylabel('B')

    ax_rb.set_xlabel('R')
    ax_rb.set_ylabel('B')

    print("saving transformed examples")
    # All artifacts go under output/cluster_3d_input/<dataset1>&<dataset2>.../
    output_path = os.path.join('output', 'cluster_3d_input', '&'.join(train_datasets), 'origin_image.png')
    os.makedirs(os.path.dirname(output_path), exist_ok=True)
    example_image = dataset[0]
    plt.imsave(output_path, example_image)
    # Nearest-neighbour lookup: map each pixel of the example image to the
    # index of its closest cluster center.  cKDTree.query returns
    # (distances, indices); [1] keeps the indices, which fit in uint8
    # because there are exactly 256 centers.
    tree = cKDTree(cluster_centers)
    h, w, c = example_image.shape
    transformed_image = tree.query(example_image.reshape(-1, c), k=1)[1].reshape(h, w).astype(np.uint8)
    output_path = os.path.join('output', 'cluster_3d_input', '&'.join(train_datasets), 'transformed_image.png')
    os.makedirs(os.path.dirname(output_path), exist_ok=True)
    # Replicate the single index channel to 3 channels so imsave writes the
    # cluster-id map as a grayscale-looking RGB image.
    plt.imsave(output_path, np.expand_dims(transformed_image,-1).repeat(3,2))


    print("saving figure")

    output_path = os.path.join('output', 'cluster_3d_input', '&'.join(train_datasets), '3d_input_RGB.png')
    os.makedirs(os.path.dirname(output_path), exist_ok=True)
    plt.savefig(output_path, dpi=240)

    print("saving cluster centers")

    # Persist the (256, 3) float array of centers for downstream use.
    output_path = os.path.join('output', 'cluster_3d_input', '&'.join(train_datasets), 'centers.pkl')
    with open(output_path, 'wb') as f:
        pickle.dump(cluster_centers, f)

    print("done")