from torch.utils.data import Dataset
from torchvision import datasets
import numpy as np
import pandas as pd


class CustomDataset(Dataset):
    """Minimal map-style dataset over pre-loaded feature/label sequences.

    Optional callables may be supplied to transform the feature and/or the
    label each time a sample is fetched.
    """

    def __init__(self, features, labels, transform=None, target_transform=None):
        self.features = features
        self.labels = labels
        self.transform = transform
        self.target_transform = target_transform

    def __len__(self):
        # One sample per label entry.
        return len(self.labels)

    def __getitem__(self, idx):
        feature, label = self.features[idx], self.labels[idx]
        if self.transform is not None:
            feature = self.transform(feature)
        if self.target_transform is not None:
            label = self.target_transform(label)
        return feature, label


# Purchase100 dataset loading
def load_Purchase100(set_id, num_shadowsets, prop_keep=0.14, seed=0):
    """Load Purchase100 and build a disjoint train/test split for one shadow set.

    Each record gets one random score per shadow set; ranking the scores per
    record, a record is used for *training* by shadow set ``set_id`` when its
    rank lies in the bottom ``prop_keep`` fraction, and for *testing* when it
    lies in the top ``prop_keep`` fraction — so the two subsets never overlap.

    Args:
        set_id: Index of the shadow dataset to materialize.
        num_shadowsets: Total number of shadow datasets being sampled.
        prop_keep: Fraction of shadow sets each record participates in.
        seed: Seed for the global NumPy RNG (reproducible splits).

    Returns:
        ``((x_train, y_train), (x_test, y_test), train_keep_exp, test_keep_exp)``
        where the last two are boolean membership masks over the full dataset.
    """
    dataframe = pd.read_csv('../datasets/purchase100.txt', sep=',', encoding='utf-8', header=None)
    X = dataframe.iloc[:, :600].to_numpy()   # first 600 columns are the features
    Y = dataframe.iloc[:, 600].to_numpy()    # column 600 holds the class label
    dataset_size = X.shape[0]

    # Rank each record's random score across shadow sets: low ranks go to
    # train, high ranks go to test, guaranteeing disjoint membership.
    np.random.seed(seed)
    keep_matrix = np.random.uniform(0, 1, size=(num_shadowsets, dataset_size))
    order = keep_matrix.argsort(0)
    train_keep = order < int(prop_keep * num_shadowsets)
    test_keep = order >= int((1 - prop_keep) * num_shadowsets)
    train_keep_exp = np.asarray(train_keep[set_id], dtype=bool)
    test_keep_exp = np.asarray(test_keep[set_id], dtype=bool)

    x_train, y_train = X[train_keep_exp], Y[train_keep_exp]
    x_test, y_test = X[test_keep_exp], Y[test_keep_exp]

    return (x_train, y_train), (x_test, y_test), train_keep_exp, test_keep_exp

# Purchase100 dataset loading (first 60k records only)
def load_Purchase100_limited(set_id, num_shadowsets, prop_keep=0.5, seed=0):
    """Load the first 60,000 Purchase100 records and split them for one shadow set.

    Each record gets one random score per shadow set; a record trains shadow
    set ``set_id`` when its rank is in the bottom ``prop_keep`` fraction, and
    the test subset is simply the complement, so together they cover all records.

    Args:
        set_id: Index of the shadow dataset to materialize.
        num_shadowsets: Total number of shadow datasets being sampled.
        prop_keep: Fraction of shadow sets each record trains.
        seed: Seed for the global NumPy RNG (reproducible splits).

    Returns:
        ``((x_train, y_train), (x_test, y_test), train_keep_exp, test_keep_exp)``
        where the last two are boolean membership masks over the 60k records.
    """
    dataframe = pd.read_csv('../datasets/purchase100.txt', sep=',', encoding='utf-8', header=None)
    X = dataframe.iloc[:, :600].to_numpy()   # first 600 columns are the features
    Y = dataframe.iloc[:, 600].to_numpy()    # column 600 holds the class label

    # Cap the dataset at a fixed 60k records.
    X = X[:60000]
    Y = Y[:60000]
    dataset_size = X.shape[0]

    # Rank each record's random score across shadow sets; the test mask is the
    # logical complement of the train mask.
    np.random.seed(seed)
    keep_matrix = np.random.uniform(0, 1, size=(num_shadowsets, dataset_size))
    order = keep_matrix.argsort(0)
    train_keep = order < int(prop_keep * num_shadowsets)
    test_keep = ~train_keep
    train_keep_exp = np.asarray(train_keep[set_id], dtype=bool)
    test_keep_exp = np.asarray(test_keep[set_id], dtype=bool)

    x_train, y_train = X[train_keep_exp], Y[train_keep_exp]
    x_test, y_test = X[test_keep_exp], Y[test_keep_exp]

    return (x_train, y_train), (x_test, y_test), train_keep_exp, test_keep_exp

def load_Purchase100_limited_keep(num_shadowsets, prop_keep=0.5, seed=0):
    """Load the first 60,000 Purchase100 records plus the full keep matrix.

    Unlike :func:`load_Purchase100_limited`, this does not select a single
    shadow set: it returns the whole boolean membership matrix so callers can
    index any shadow set themselves.

    Args:
        num_shadowsets: Total number of shadow datasets being sampled.
        prop_keep: Fraction of shadow sets each record trains.
        seed: Seed for the global NumPy RNG (reproducible splits).

    Returns:
        ``(X, Y, train_keep)`` — features, labels, and a boolean matrix of
        shape ``(num_shadowsets, 60000)`` marking train membership per set.
    """
    dataframe = pd.read_csv('../datasets/purchase100.txt', sep=',', encoding='utf-8', header=None)
    X = dataframe.iloc[:, :600].to_numpy()   # first 600 columns are the features
    Y = dataframe.iloc[:, 600].to_numpy()    # column 600 holds the class label

    # Cap the dataset at a fixed 60k records.
    X = X[:60000]
    Y = Y[:60000]
    dataset_size = X.shape[0]

    # Rank each record's random score across shadow sets; bottom prop_keep
    # fraction of ranks marks train membership for that set.
    np.random.seed(seed)
    keep_matrix = np.random.uniform(0, 1, size=(num_shadowsets, dataset_size))
    order = keep_matrix.argsort(0)
    train_keep = order < int(prop_keep * num_shadowsets)

    return X, Y, train_keep

def load_MNIST(set_id, num_shadowsets, prop_keep=0.5, seed=0):
    """Split the MNIST *training* set into train/test subsets for one shadow set.

    Both returned subsets are drawn from the 60k MNIST training images: each
    image trains shadow set ``set_id`` when its per-set random-score rank is in
    the bottom ``prop_keep`` fraction; the test subset is the complement.
    (The official MNIST test split is not used and is intentionally not
    downloaded here.)

    Args:
        set_id: Index of the shadow dataset to materialize.
        num_shadowsets: Total number of shadow datasets being sampled.
        prop_keep: Fraction of shadow sets each image trains.
        seed: Seed for the global NumPy RNG (reproducible splits).

    Returns:
        ``((x_train, y_train), (x_test, y_test), train_keep_exp, test_keep_exp)``
        where the last two are boolean membership masks over the train split.
    """
    train_dataset = datasets.MNIST(root='../datasets/mnist', train=True, transform=None, download=True)
    x_train_data = train_dataset.data.numpy()
    y_train_data = train_dataset.targets.numpy()

    train_dataset_size = x_train_data.shape[0]

    # Rank each image's random score across shadow sets; the test mask is the
    # logical complement of the train mask.
    np.random.seed(seed)
    keep_matrix = np.random.uniform(0, 1, size=(num_shadowsets, train_dataset_size))
    order = keep_matrix.argsort(0)
    train_keep = order < int(prop_keep * num_shadowsets)
    test_keep = ~train_keep
    train_keep_exp = np.asarray(train_keep[set_id], dtype=bool)
    test_keep_exp = np.asarray(test_keep[set_id], dtype=bool)

    x_train = x_train_data[train_keep_exp]
    y_train = y_train_data[train_keep_exp]
    x_test = x_train_data[test_keep_exp]
    y_test = y_train_data[test_keep_exp]

    return (x_train, y_train), (x_test, y_test), train_keep_exp, test_keep_exp

def load_MNIST_keep(num_shadowsets, prop_keep=0.5, seed=0):
    """Load the MNIST training split plus the full shadow-set keep matrix.

    Unlike :func:`load_MNIST`, this does not select a single shadow set: it
    returns the whole boolean membership matrix so callers can index any shadow
    set themselves. (The official MNIST test split is not used and is
    intentionally not downloaded here.)

    Args:
        num_shadowsets: Total number of shadow datasets being sampled.
        prop_keep: Fraction of shadow sets each image trains.
        seed: Seed for the global NumPy RNG (reproducible splits).

    Returns:
        ``(x_train_data, y_train_data, train_keep)`` — images, labels, and a
        boolean matrix of shape ``(num_shadowsets, 60000)`` marking train
        membership per set.
    """
    train_dataset = datasets.MNIST(root='../datasets/mnist', train=True, transform=None, download=True)
    x_train_data = train_dataset.data.numpy()
    y_train_data = train_dataset.targets.numpy()

    train_dataset_size = x_train_data.shape[0]

    # Rank each image's random score across shadow sets; bottom prop_keep
    # fraction of ranks marks train membership for that set.
    np.random.seed(seed)
    keep_matrix = np.random.uniform(0, 1, size=(num_shadowsets, train_dataset_size))
    order = keep_matrix.argsort(0)
    train_keep = order < int(prop_keep * num_shadowsets)

    return x_train_data, y_train_data, train_keep




def load_CIFAR10(set_id, num_shadowsets, prop_keep=0.5, seed=0):
    """Split the CIFAR-10 *training* set into train/test subsets for one shadow set.

    Both returned subsets are drawn from the 50k CIFAR-10 training images: each
    image trains shadow set ``set_id`` when its per-set random-score rank is in
    the bottom ``prop_keep`` fraction; the test subset is the complement.
    (The official CIFAR-10 test split is not used and is intentionally not
    downloaded here.)

    Args:
        set_id: Index of the shadow dataset to materialize.
        num_shadowsets: Total number of shadow datasets being sampled.
        prop_keep: Fraction of shadow sets each image trains.
        seed: Seed for the global NumPy RNG (reproducible splits).

    Returns:
        ``((x_train, y_train), (x_test, y_test), train_keep_exp, test_keep_exp)``
        where the last two are boolean membership masks over the train split.
    """
    # Use the public torchvision alias rather than the datasets.cifar submodule.
    train_dataset = datasets.CIFAR10(root='../datasets/cifar10', train=True, transform=None, download=True)

    x_train_data = train_dataset.data
    y_train_data = np.array(train_dataset.targets)

    train_dataset_size = x_train_data.shape[0]

    # Rank each image's random score across shadow sets; the test mask is the
    # logical complement of the train mask.
    np.random.seed(seed)
    keep_matrix = np.random.uniform(0, 1, size=(num_shadowsets, train_dataset_size))
    order = keep_matrix.argsort(0)
    train_keep = order < int(prop_keep * num_shadowsets)
    test_keep = ~train_keep
    train_keep_exp = np.asarray(train_keep[set_id], dtype=bool)
    test_keep_exp = np.asarray(test_keep[set_id], dtype=bool)

    x_train = x_train_data[train_keep_exp]
    y_train = y_train_data[train_keep_exp]
    x_test = x_train_data[test_keep_exp]
    y_test = y_train_data[test_keep_exp]

    return (x_train, y_train), (x_test, y_test), train_keep_exp, test_keep_exp


def load_CIFAR10_keep(num_shadowsets, prop_keep=0.5, seed=0):
    """Load the CIFAR-10 training split plus the full shadow-set keep matrix.

    Unlike :func:`load_CIFAR10`, this does not select a single shadow set: it
    returns the whole boolean membership matrix so callers can index any shadow
    set themselves. (The official CIFAR-10 test split is not used and is
    intentionally not downloaded here.)

    Args:
        num_shadowsets: Total number of shadow datasets being sampled.
        prop_keep: Fraction of shadow sets each image trains.
        seed: Seed for the global NumPy RNG (reproducible splits).

    Returns:
        ``(x_train_data, y_train_data, train_keep)`` — images, labels, and a
        boolean matrix of shape ``(num_shadowsets, 50000)`` marking train
        membership per set.
    """
    # Use the public torchvision alias rather than the datasets.cifar submodule.
    train_dataset = datasets.CIFAR10(root='../datasets/cifar10', train=True, transform=None, download=True)

    x_train_data = train_dataset.data
    y_train_data = np.array(train_dataset.targets)

    train_dataset_size = x_train_data.shape[0]

    # Rank each image's random score across shadow sets; bottom prop_keep
    # fraction of ranks marks train membership for that set.
    np.random.seed(seed)
    keep_matrix = np.random.uniform(0, 1, size=(num_shadowsets, train_dataset_size))
    order = keep_matrix.argsort(0)
    train_keep = order < int(prop_keep * num_shadowsets)

    return x_train_data, y_train_data, train_keep



def load_CIFAR100(set_id, num_shadowsets, prop_keep=0.5, seed=0):
    """Split the CIFAR-100 *training* set into train/test subsets for one shadow set.

    Both returned subsets are drawn from the 50k CIFAR-100 training images:
    each image trains shadow set ``set_id`` when its per-set random-score rank
    is in the bottom ``prop_keep`` fraction; the test subset is the complement.
    (The official CIFAR-100 test split is not used and is intentionally not
    downloaded here.)

    Args:
        set_id: Index of the shadow dataset to materialize.
        num_shadowsets: Total number of shadow datasets being sampled.
        prop_keep: Fraction of shadow sets each image trains.
        seed: Seed for the global NumPy RNG (reproducible splits).

    Returns:
        ``((x_train, y_train), (x_test, y_test), train_keep_exp, test_keep_exp)``
        where the last two are boolean membership masks over the train split.
    """
    # Use the public torchvision alias rather than the datasets.cifar submodule.
    train_dataset = datasets.CIFAR100(root='../datasets/cifar100', train=True, transform=None, download=True)

    x_train_data = train_dataset.data
    y_train_data = np.array(train_dataset.targets)

    train_dataset_size = x_train_data.shape[0]

    # Rank each image's random score across shadow sets; the test mask is the
    # logical complement of the train mask.
    np.random.seed(seed)
    keep_matrix = np.random.uniform(0, 1, size=(num_shadowsets, train_dataset_size))
    order = keep_matrix.argsort(0)
    train_keep = order < int(prop_keep * num_shadowsets)
    test_keep = ~train_keep
    train_keep_exp = np.asarray(train_keep[set_id], dtype=bool)
    test_keep_exp = np.asarray(test_keep[set_id], dtype=bool)

    x_train = x_train_data[train_keep_exp]
    y_train = y_train_data[train_keep_exp]
    x_test = x_train_data[test_keep_exp]
    y_test = y_train_data[test_keep_exp]

    return (x_train, y_train), (x_test, y_test), train_keep_exp, test_keep_exp


def load_CIFAR100_keep(num_shadowsets, prop_keep=0.5, seed=0):
    """Load the CIFAR-100 training split plus the full shadow-set keep matrix.

    Unlike :func:`load_CIFAR100`, this does not select a single shadow set: it
    returns the whole boolean membership matrix so callers can index any shadow
    set themselves. (The official CIFAR-100 test split is not used and is
    intentionally not downloaded here.)

    Args:
        num_shadowsets: Total number of shadow datasets being sampled.
        prop_keep: Fraction of shadow sets each image trains.
        seed: Seed for the global NumPy RNG (reproducible splits).

    Returns:
        ``(x_train_data, y_train_data, train_keep)`` — images, labels, and a
        boolean matrix of shape ``(num_shadowsets, 50000)`` marking train
        membership per set.
    """
    # Use the public torchvision alias rather than the datasets.cifar submodule.
    train_dataset = datasets.CIFAR100(root='../datasets/cifar100', train=True, transform=None, download=True)

    x_train_data = train_dataset.data
    y_train_data = np.array(train_dataset.targets)

    train_dataset_size = x_train_data.shape[0]

    # Rank each image's random score across shadow sets; bottom prop_keep
    # fraction of ranks marks train membership for that set.
    np.random.seed(seed)
    keep_matrix = np.random.uniform(0, 1, size=(num_shadowsets, train_dataset_size))
    order = keep_matrix.argsort(0)
    train_keep = order < int(prop_keep * num_shadowsets)

    return x_train_data, y_train_data, train_keep