import tensorflow as tf
from tensorflow.keras.utils import to_categorical
import os
import gc
import numpy as np


class DataLoader():
    def __init__(self, args, root_path, dir) -> None:
        self.root_path = root_path
        self.dir = dir
        self.image_path = os.path.join(self.root_path, self.dir)
        self.images = os.listdir(self.image_path)
        self.args = args

    def __getitem__(self, index) -> tuple:
        fmri_path = os.path.join(self.root_path, self.dir, self.images[index])
        fmri = np.load(fmri_path)
        normalization = 'minmax'
        recover = True
        n_slices, vectors = fmri.shape
        co_matrix = []
        for i in range(n_slices):
            if normalization == 'minmax':
                vectors = fmri[i,:]
                min =np.min(vectors)
                max =np.max(vectors)
                vectors = 2*(vectors-min)/(max-min) -1
            if recover:
                matrix = np.zeros((self.args.n_rois,self.args.n_rois))
                upper_matrix_indices = np.triu_indices(self.args.n_rois, k=1)
                matrix[upper_matrix_indices] = vectors
                matrix += matrix.T
                np.fill_diagonal(matrix, 1)
                co_matrix.append(matrix)
        # adding channel dimension
        np.expand_dims(fmri, axis=-1)
        label = 0
        if self.dir == 'AD/':
            label = label + 1
        elif self.dir == 'CN/':
            label = label
        del fmri, vectors, matrix
        gc.collect()
        return co_matrix, label

    def __len__(self) -> int:
        return len(self.images)

def load_data(args, root_path, path1, path2, tfBool) -> tuple:
    """Load AD (``path1``) and CN (``path2``) samples and build the dataset.

    Returns ``(datasets, labels)`` where ``labels`` is one-hot encoded with
    ``args.n_classes`` classes. When ``tfBool`` is truthy, ``datasets`` is a
    batched, repeating ``tf.data.Dataset`` of ``(sample, label)`` pairs;
    otherwise it is a plain ``np.ndarray``. In both cases the one-hot label
    array is returned as the second element.
    """
    def _collect(loader):
        # Gather every (sample, label) pair from a DataLoader; previously
        # this loop was duplicated verbatim for the AD and CN directories.
        data, labels = [], []
        for i in range(len(loader)):
            sample, label = loader[i]
            data.append(sample)
            labels.append(label)
        return data, labels

    loader_ad = DataLoader(args, root_path, path1)
    loader_cn = DataLoader(args, root_path, path2)
    data_ad, labels_ad = _collect(loader_ad)
    data_cn, labels_cn = _collect(loader_cn)
    datasets = np.array(data_ad + data_cn)
    # Build the combined label vector once; the original constructed it twice.
    raw_labels = np.array(labels_ad + labels_cn)
    unique, counts = np.unique(raw_labels, return_counts=True)
    print("set label distribution:", dict(zip(unique, counts)))
    labels = to_categorical(raw_labels, num_classes=args.n_classes)
    print("the shape of X_:", datasets.shape)
    print("the shape of Y_:", labels.shape)
    if tfBool:
        datasets = tf.data.Dataset.from_tensor_slices((datasets, labels)).batch(args.batch_size).repeat()
    # Release the intermediate lists/loaders before returning large arrays.
    del data_ad, labels_ad, data_cn, labels_cn, loader_ad, loader_cn
    gc.collect()
    return datasets, labels