import glob
import re

import PIL
from scipy import io
import h5py
import hdf5storage

# Convert a 3-D image array (e.g. 256x256x10) into 10 separate 256x256 slices and save them as images.
import os
import imageio
import numpy as np
import mat73


def save_mat(save_mat_filepath, saved_datas):
    """Save *saved_datas* to a MATLAB v7.3 .mat file.

    A dict is stored as-is (its keys become MATLAB variable names); any
    other value is wrapped under the single key 'data', which is where
    load_mat() looks for it.
    """
    # isinstance also accepts dict subclasses (e.g. OrderedDict), unlike
    # the original exact `type(x) == dict` comparison; the two identical
    # savemat calls are collapsed into one.
    if isinstance(saved_datas, dict):
        payload = saved_datas
    else:
        payload = {'data': saved_datas}
    hdf5storage.savemat(save_mat_filepath,
                        payload,
                        format='7.3',
                        oned_as='column',
                        store_python_metadata=False)


def load_mat(loaded_mat_file, key):
    """Load every variable stored in *loaded_mat_file* and return them as a list.

    NOTE(review): this definition is shadowed by the one-argument
    ``load_mat`` defined immediately below, so it is dead code as the
    module stands. The *key* parameter is accepted for interface
    compatibility but is unused.
    """
    file = hdf5storage.loadmat(loaded_mat_file)
    keys = list(file.keys())
    data = []  # bug fix: was `()` — a tuple has no .append(), so this always crashed
    for k in keys:
        data.append(file[k])
    return data


def load_mat(loaded_mat_file):
    """Read a .mat file written by save_mat() and return its 'data' variable."""
    contents = hdf5storage.loadmat(loaded_mat_file)
    return contents['data']


def load_mat_by_key(loaded_mat_file, key):
    """Return the variable stored under *key* in the given .mat file."""
    return hdf5storage.loadmat(loaded_mat_file)[key]


def load_mat_all_keys(loaded_mat_file):
    """Return the full dict of variables stored in the given .mat file."""
    contents = hdf5storage.loadmat(loaded_mat_file)
    return contents


#
def joint_xy(files, save_path, per_mat=1, total_mat=10, start_data=1):
    """Concatenate numbered x/y .mat pairs from several directories.

    For each index i in [start_data, start_data + total_mat), the file
    ending in 'x<i>.mat' (and its 'y<i>.mat' counterpart) is loaded from
    every directory in *files* and concatenated along axis 0. Every
    *per_mat* indices the accumulated pair is written to *save_path* as
    '<kind>_x<j>[-<i>].mat' / '<kind>_y<j>[-<i>].mat', where <kind> is
    the last path component of files[0].

    Parameters
    ----------
    files : sequence of directory paths to pull pairs from.
    save_path : output directory (created if missing).
    per_mat : how many indices to merge into one output file.
    total_mat : how many indices to process in total.
    start_data : first index to process.
    """
    # Renamed from `type`, which shadowed the builtin.
    kind = files[0].split('\\')[-1]
    # makedirs creates missing parents too; os.mkdir would fail on them.
    os.makedirs(save_path, exist_ok=True)
    i = start_data
    j = 1
    x = None
    y = None
    while i <= start_data + total_mat - 1:
        print(i)
        for d in files:
            suffix = 'x' + str(i) + '.mat'
            mats = [m for m in mats_dir if m.endswith(suffix)] if (mats_dir := os.listdir(d)) else []
            x_file = os.path.join(d, mats[0])
            # Bug fix: derive the y filename from the basename only —
            # replacing 'x' in the full path corrupted any directory
            # name that contained an 'x'.
            y_file = os.path.join(d, mats[0].replace('x', 'y'))
            x_data = load_mat(x_file)
            y_data = load_mat(y_file)
            print('拼接{}'.format(x_file))
            if x is None:
                x = x_data
                y = y_data
            else:
                x = np.concatenate((x, x_data), axis=0)
                y = np.concatenate((y, y_data), axis=0)
        print(x.shape)
        print(y.shape)
        # Only flush to disk on a per_mat boundary or on the final index.
        if not ((i - start_data + 1) % per_mat == 0 or i == total_mat + start_data - 1):
            i += 1
            continue
        # Bug fix: build the y name explicitly instead of x_name.replace('x','y'),
        # which would also rewrite any 'x' inside *kind*.
        if i == j:
            x_name = '{}_x{}.mat'.format(kind, i)
            y_name = '{}_y{}.mat'.format(kind, i)
        else:
            x_name = '{}_x{}-{}.mat'.format(kind, j, i)
            y_name = '{}_y{}-{}.mat'.format(kind, j, i)
        x_ = os.path.join(save_path, x_name)
        y_ = os.path.join(save_path, y_name)
        save_mat(x_, x)
        save_mat(y_, y)
        i += 1
        j = i
        print('保存至', x_, y_)
        x = None
        y = None


def joint_tests(files, save_path):
    """Concatenate every '*test.mat' found in the given directories into one file.

    All test volumes are stacked along axis 0 and written to *save_path*
    as '<kind>_test.mat', where <kind> is the last path component of
    files[0].
    """
    # Renamed from `type`, which shadowed the builtin.
    kind = files[0].split('\\')[-1]
    test = None
    print(len(files))
    for d in files:
        mats = [m for m in os.listdir(d) if m.endswith('test.mat')]
        for m in mats:
            test_data = load_mat(os.path.join(d, m))
            if test is None:
                test = test_data
            else:
                test = np.concatenate((test, test_data), axis=0)
            print(test.shape)

    test_name = '{}test.mat'.format(kind + '_')
    # Bug fix: dropped the leftover .replace('x', 'y') on the output name —
    # a copy-paste remnant of the x/y pairing code that could mangle any
    # name containing an 'x'.
    test_ = os.path.join(save_path, test_name)
    save_mat(test_, test)
    print('保存至:', test_)


def isDigital(x):
    """Return a truthy match when *x* contains 'x' or 'y' followed by a digit.

    Used to pick the numbered x<N>.mat / y<N>.mat files out of a
    directory listing (filter predicate — only truthiness matters).
    """
    # Raw string fixes the invalid-escape-sequence warning of '\d';
    # one combined character class replaces the two separate searches.
    return re.search(r'[xy]\d', x)


def joint_single_dir(dir, save_path='', per_mat=0, total=0):
    """Concatenate all numbered x/y .mat pairs in *dir* into larger chunks.

    Each x<N>.mat and its y<N>.mat counterpart are stacked along axis 0;
    every *per_mat* pairs (default: all of them) the accumulated arrays
    are written to *save_path* (default: <dir>/total) as x<k>.mat /
    y<k>.mat where k is the 1-based chunk index.
    """
    if save_path is None or save_path == '':
        save_path = os.path.join(dir, 'total')
    # Bug fix: the output directory was never created, so save_mat failed
    # when it did not already exist.
    os.makedirs(save_path, exist_ok=True)
    print(dir)
    names = list(filter(isDigital, os.listdir(dir)))
    names.sort(key=lambda name: int(re.findall(r'\d+', name)[0]))
    if total == 0:
        total = int(len(names) / 2)  # half the entries are x files, half y
    if per_mat == 0:
        per_mat = total

    # Bug fix: the accumulators were initialised to '' while the branch
    # below tests `is None`, so the very first file went straight into
    # np.concatenate with a str and crashed.
    x_total = None
    y_total = None
    i = 0
    for f in names:
        if 'x' not in f:
            continue
        i += 1

        x_data = load_mat(os.path.join(dir, f))
        y_data = load_mat(os.path.join(dir, f.replace('x', 'y')))

        if x_total is None:
            x_total = x_data
            y_total = y_data
        else:
            x_total = np.concatenate((x_total, x_data), axis=0)
            y_total = np.concatenate((y_total, y_data), axis=0)
        print(f'{x_total.shape=}')
        print(f'{y_total.shape=}')
        print(i)
        if i % per_mat == 0 or i == total:
            # Bug fix: i / per_mat produced float filenames like 'x1.0.mat';
            # ceil-division gives the intended integer chunk index (the final
            # partial chunk still gets its own index).
            idx = (i + per_mat - 1) // per_mat
            x_ = os.path.join(save_path, 'x' + str(idx) + '.mat')
            # Built explicitly instead of x_.replace('x', 'y'), which would
            # also rewrite 'x' characters inside save_path.
            y_ = os.path.join(save_path, 'y' + str(idx) + '.mat')
            save_mat(x_, x_total)
            save_mat(y_, y_total)
            print('保存至:' + save_path)
            x_total = None
            y_total = None


def show_predict_mat_results(predict_path, test_path='', predict_suffix='predict.mat'):
    """For every '*predict.mat' under *predict_path*, render the predict,
    test and origin volumes as PNG slices into a 'results' subfolder."""
    if test_path == '' or test_path is None:
        test_path = predict_path
    predict_names = [n for n in os.listdir(predict_path) if n.endswith(predict_suffix)]
    results_dir = os.path.join(predict_path, 'results')
    for name in predict_names:
        predict = os.path.join(predict_path, name)
        # Created lazily (per match) so an empty listing leaves the
        # filesystem untouched.
        if not os.path.exists(results_dir):
            os.mkdir(results_dir)
        variants = (predict,
                    predict.replace('predict', 'test'),
                    predict.replace('predict', 'origin'))
        for mat_file in variants:
            show_images_from_mat(mat_file, results_dir)


def show_images_from_mat(file, save_path, suffix='', key='data'):
    """Save each 2-D slice of the array stored under *key* in *file* as a
    grayscale PNG named '<i><suffix>' inside *save_path*.

    When *suffix* is empty it defaults to '<basename-without-ext>.png'.
    """
    # Bug fix: `import PIL` alone does not guarantee the PIL.Image
    # submodule is loaded; import it explicitly.
    from PIL import Image

    if suffix == '':
        # os.path handles both '/' and '\\' separators on Windows, unlike
        # the original split on a literal backslash; splitext also keeps
        # dots inside the stem intact ('a.b.mat' -> 'a.b').
        stem = os.path.splitext(os.path.basename(file))[0]
        suffix = stem + '.png'
    os.makedirs(save_path, exist_ok=True)
    data = load_mat_by_key(file, key)
    for i in range(len(data)):
        img = Image.fromarray(data[i]).convert("L")
        img.save(os.path.join(save_path, str(i) + suffix))


if __name__ == '__main__':
    # All real invocations below are commented out; running this script
    # directly currently only prints a blank line.
    print()
#     joint_tests((r'E:\download\Dataset\keras\IXI-T1\output\train\artifact'
#                  , r'E:\download\Dataset\keras\IXI-T2\output\train\artifact'
#                  , r'E:\download\Dataset\keras\IXI-PD\output\train\artifact'
#                  , r'E:\download\Dataset\keras\IXI-DTI\output\train\artifact'
#                  , r'E:\download\Dataset\keras\IXI-MRA\output\train\artifact'
#                  , r'E:\download\Dataset\keras\IXI-T1\output\train\noise'
#                  , r'E:\download\Dataset\keras\IXI-T2\output\train\noise'
#                  , r'E:\download\Dataset\keras\IXI-PD\output\train\noise'
#                  , r'E:\download\Dataset\keras\IXI-DTI\output\train\noise'
#                  , r'E:\download\Dataset\keras\IXI-MRA\output\train\noise'
#                  , r'E:\download\Dataset\keras\IXI-T1\output\train\detail_less'
#                  , r'E:\download\Dataset\keras\IXI-T2\output\train\detail_less'
#                  , r'E:\download\Dataset\keras\IXI-PD\output\train\detail_less'
#                  , r'E:\download\Dataset\keras\IXI-DTI\output\train\detail_less'
#                  , r'E:\download\Dataset\keras\IXI-MRA\output\train\detail_less'),
#                 save_path=r'E:\download\Dataset\keras\train\all')
#
#     # Concatenate all test sets from different sources, grouped by type
#
# joint_tests((r'E:\download\Dataset\keras\IXI-T1\output\train\detail_less'
#              , r'E:\download\Dataset\keras\IXI-T2\output\train\detail_less'
#              , r'E:\download\Dataset\keras\IXI-PD\output\train\detail_less'
#              , r'E:\download\Dataset\keras\IXI-DTI\output\train\detail_less'
#              , r'E:\download\Dataset\keras\IXI-MRA\output\train\detail_less'),
#             save_path=r'E:\download\Dataset\keras\train\local\detail_less')
# joint_tests((r'E:\download\Dataset\keras\IXI-T1\output\train\noise'
#              , r'E:\download\Dataset\keras\IXI-T2\output\train\noise'
#              , r'E:\download\Dataset\keras\IXI-PD\output\train\noise'
#              , r'E:\download\Dataset\keras\IXI-DTI\output\train\noise'
#              , r'E:\download\Dataset\keras\IXI-MRA\output\train\noise'),
#             save_path=r'E:\download\Dataset\keras\train\local\noise')
# joint_tests((r'E:\download\Dataset\keras\IXI-T1\output\train\artifact'
#              , r'E:\download\Dataset\keras\IXI-T2\output\train\artifact'
#              , r'E:\download\Dataset\keras\IXI-PD\output\train\artifact'
#              , r'E:\download\Dataset\keras\IXI-DTI\output\train\artifact'
#              , r'E:\download\Dataset\keras\IXI-MRA\output\train\artifact'),
#             save_path=r'E:\download\Dataset\keras\train\local\artifact')

# Concatenate all mat files under a single directory
# joint_single_dir(r'E:\download\Dataset\keras\train\all',r'E:\download\Dataset\keras\train\cloud' , per_mat=3)
