import numpy as np
import os
import argparse
import random
import shutil 

def _str2bool(value):
    """Parse a command-line boolean (yes/no, true/false, 1/0, case-insensitive).

    argparse's ``type=bool`` is a well-known pitfall: ``bool("False")`` is
    True, so any non-empty string would have enabled the flag.  This
    converter interprets the text instead.

    Raises:
        argparse.ArgumentTypeError: if the text is not a recognised boolean.
    """
    if isinstance(value, bool):
        return value
    lowered = value.lower()
    if lowered in ('yes', 'true', 't', 'y', '1'):
        return True
    if lowered in ('no', 'false', 'f', 'n', '0'):
        return False
    raise argparse.ArgumentTypeError('boolean value expected, got %r' % value)


parser = argparse.ArgumentParser()
# NOTE: the original passed both required=True and a default; argparse
# never uses the default of a required option, so the dead defaults are
# dropped here for clarity.
parser.add_argument('--datarootA', required=True, help='path to images A')
parser.add_argument('--datarootB', required=True, help='path to images B')
parser.add_argument('--destination', required=True,
                    help='the final dataset place root')
parser.add_argument('--random', type=_str2bool, default=True,
                    help='if true, shuffle the indices before splitting '
                         'into train/val/test')


args = parser.parse_args()
print(args)

# Recognised input file suffixes.  Upper- and lower-case variants are listed
# explicitly so matching stays a plain suffix comparison.  '.npy' is included
# because some datasets store images as raw numpy arrays.
IMG_EXTENSIONS = [
    '.jpg', '.JPG', '.jpeg', '.JPEG',
    '.png', '.PNG', '.ppm', '.PPM', '.bmp', '.BMP',
    '.tif', '.TIF', '.tiff', '.TIFF', '.npy',
]

# str.endswith accepts a tuple of suffixes: one C-level call instead of a
# Python-level generator over every extension.  Hoisted so the tuple is
# built once, not per file.
_IMG_SUFFIXES = tuple(IMG_EXTENSIONS)


def is_image_file(filename):
    """Return True if *filename* ends with a recognised image extension."""
    return filename.endswith(_IMG_SUFFIXES)


def make_dataset(dir, max_dataset_size=float("inf")):
    """Recursively collect image file paths under *dir*.

    Walks the tree in sorted order so repeated runs yield a deterministic
    list, keeps every file whose name passes ``is_image_file``, and
    truncates the result to at most *max_dataset_size* entries.

    Args:
        dir: root directory to scan.
        max_dataset_size: cap on the number of returned paths; the default
            of ``float("inf")`` means "no limit".

    Returns:
        list[str]: paths of matching files.

    Raises:
        ValueError: if *dir* is not an existing directory.  (The original
        used ``assert``, which is silently stripped under ``python -O``.)
    """
    if not os.path.isdir(dir):
        raise ValueError('%s is not a valid directory' % dir)

    images = []
    for root, _, fnames in sorted(os.walk(dir)):
        for fname in fnames:
            if is_image_file(fname):
                images.append(os.path.join(root, fname))
    # min() keeps the slice bound an int even when the cap is float inf.
    return images[:min(max_dataset_size, len(images))]

dir_A = args.datarootA
dir_B = args.datarootB

A_paths = sorted(make_dataset(dir_A))   # load images from '/path/to/data/trainA'
B_paths = sorted(make_dataset(dir_B))   # load images from '/path/to/data/trainB'

# The A/B datasets are paired by index, so B must cover every A index;
# fail fast here instead of with an opaque IndexError in the copy loops.
dataset_len = len(A_paths)
if len(B_paths) < dataset_len:
    raise ValueError('dataset B has %d images but dataset A has %d; '
                     'B must have at least as many images as A'
                     % (len(B_paths), dataset_len))

arr_indexes = list(range(dataset_len))
if args.random:
    print("these data will randomly distribute")
    # Fixed seed so repeated runs reproduce the same split:
    # 80% train / 10% val / 10% test.
    random.seed(0)
    random.shuffle(arr_indexes)
    n_train = int(dataset_len * 0.8)
    n_val_end = int(dataset_len * 0.9)
    indexed_train = arr_indexes[:n_train]
    indexed_valid = arr_indexes[n_train:n_val_end]
    indexed_test = arr_indexes[n_val_end:]
else:
    # Sequential split uses different ratios: 60% train / 20% val / 20% test.
    # NOTE(review): this asymmetry with the random branch is preserved from
    # the original but is undocumented — confirm it is intentional.
    n_train = int(dataset_len * 0.6)
    n_test_start = int(dataset_len * 0.8)
    indexed_train = arr_indexes[:n_train]
    indexed_valid = arr_indexes[n_train:n_test_start]
    indexed_test = arr_indexes[n_test_start:]

# Create the six output directories (exist_ok makes re-runs idempotent).
for _split in ("train", "test", "val"):
    for _side in ("A", "B"):
        os.makedirs(os.path.join(args.destination, _split + _side), exist_ok=True)


def _copy_pairs(indexes, split):
    """Copy the A/B image pairs selected by *indexes* into <dest>/<split>A|B.

    Destination files are named by their basename.  The original code
    sliced the last eight characters of the full source *path*
    (``A_paths[i][-8:]``), which truncated longer filenames and could
    silently overwrite colliding outputs; ``os.path.basename`` keeps the
    whole name.
    """
    for i in indexes:
        for side, paths in (("A", A_paths), ("B", B_paths)):
            src = paths[i]
            dst = os.path.join(args.destination, split + side,
                               os.path.basename(src))
            shutil.copyfile(src, dst)


_copy_pairs(indexed_train, "train")
_copy_pairs(indexed_test, "test")
_copy_pairs(indexed_valid, "val")