import torch
import os
import json
import numpy as np


# Data format: data = {'users': [str(1, 2, 3, ..., n)], 'user_data': {'1': {user 1's data}, '2': {user 2's data}, ..., 'n': {user n's data}}}
# Each user's data ('1' ... 'n') has the format {'x': [list of shape (m, 28*28)], 'y': [m labels]}, where m is the number of samples that user owns

# def get_cifar10_data_path(dir_path, degree_of_noniid):
#     data_path = os.path.join(dir_path, '%sold'%str(degree_of_noniid))
#     return data_path

def get_data_path(dir_path, degree_of_noniid):
    """Build the partition directory path for a given degree of non-IID-ness.

    The on-disk layout names each partition folder '<degree>old'
    (e.g. '0.5old') directly under *dir_path*.
    """
    folder_name = f"{degree_of_noniid}old"
    return os.path.join(dir_path, folder_name)


def read_total_data(data_path, test_data_mode):
    """Load every train/test JSON file under *data_path*.

    Args:
        data_path: directory containing a 'train' sub-directory plus a
            'test_iid' and/or 'test_noniid' sub-directory of JSON files.
        test_data_mode: 'iid' or 'noniid' — selects which test
            sub-directory to read.

    Returns:
        (train_data_list, test_data_list): one parsed JSON object per
        file, in sorted filename order.

    Raises:
        ValueError: if test_data_mode is not 'iid' or 'noniid'.  (The
            original code left test_file_path unbound and crashed later
            with an UnboundLocalError.)
    """
    train_file_path = os.path.join(data_path, 'train')
    if test_data_mode == 'iid':
        test_file_path = os.path.join(data_path, 'test_iid')
    elif test_data_mode == 'noniid':
        test_file_path = os.path.join(data_path, 'test_noniid')
    else:
        raise ValueError("test_data_mode must be 'iid' or 'noniid', got %r"
                         % (test_data_mode,))

    def _load_json_dir(dir_path):
        # sorted() makes the file order — and therefore the list indices
        # used by read_data(num) — deterministic; os.listdir order is
        # arbitrary and platform-dependent.
        data_list = []
        for fname in sorted(f for f in os.listdir(dir_path) if f.endswith('.json')):
            with open(os.path.join(dir_path, fname), 'r') as fp:
                data_list.append(json.load(fp))
        return data_list

    return _load_json_dir(train_file_path), _load_json_dir(test_file_path)

#num means which num in train_data_list(test_data_list) you want to read
def read_data(data_path, num, test_data_mode):
    train, test = read_total_data(data_path, test_data_mode = test_data_mode)
    train_data_dic = train[num]
    test_data_dic = test[num]

    if train_data_dic['users'] == test_data_dic['users']:
        clients = sorted(train_data_dic['users'])
    else:
        raise Exception('train_data_dic[\'users\'] != test_data_dic[\'users\']')

    train_data = train_data_dic['user_data']
    test_data = test_data_dic['user_data']

    return train_data, test_data, clients

def batch_data(data, batch_size):
    """Split one client's data into mini-batches of float32 tensors.

    Args:
        data: dict with keys 'x' (list of flattened samples) and 'y'
            (list of labels) of equal length.
        batch_size: samples per batch; the final batch may be smaller.

    Returns:
        List of (batch_x, batch_y) float32 tensor pairs, with x and y
        shuffled in unison by a fixed, reproducible permutation.
    """
    data_x = data['x']
    data_y = data['y']

    # A dedicated RandomState(100) draws the exact same MT19937 sequence
    # the legacy np.random.seed(100) + np.random.shuffle did, so the
    # resulting permutation is identical — but without reseeding the
    # global RNG (a hidden side effect on every call) and without
    # mutating the caller's lists inside `data` in place.
    perm = np.random.RandomState(100).permutation(len(data_x))
    shuffled_x = [data_x[i] for i in perm]
    shuffled_y = [data_y[i] for i in perm]

    batch_data_list = []
    for start in range(0, len(shuffled_x), batch_size):
        batch_x = np.asarray(shuffled_x[start:start + batch_size], dtype=np.float32)
        batch_y = np.asarray(shuffled_y[start:start + batch_size], dtype=np.float32)
        # batch_x and batch_y are tensors
        batch_data_list.append((torch.from_numpy(batch_x), torch.from_numpy(batch_y)))

    return batch_data_list

def load_partition_data(batch_size, degree_of_noniid, mode, test_data_mode, num = 0):
    """Load a federated dataset partition and batch it per client.

    Args:
        batch_size: mini-batch size passed to batch_data.
        degree_of_noniid: partition identifier; '0' forces the iid test split.
        mode: dataset name — 'mnist', 'cifar10' or 'cifar100'.
        test_data_mode: 'iid' or 'noniid' test split selection.
        num: which partition file to load (default 0).

    Returns:
        (total_client_num, train_data_dict, test_data_dict,
         each_client_data_num_dict): the dicts are keyed by a dense client
        index 0..total_client_num-1; each_client_data_num_dict maps that
        index to (train_sample_count, test_sample_count).

    Raises:
        ValueError: for an unknown *mode*.  (Previously an unknown mode
            left dir_path unbound and crashed with UnboundLocalError.)
    """
    # NOTE(review): dataset roots are hard-coded to one machine's layout;
    # consider making them configurable.
    dataset_dirs = {
        'mnist': '/home/linjunke/my_fedml/data/MNIST',
        'cifar10': '/home/linjunke/my_fedml/data/cifar10',
        'cifar100': '/home/linjunke/my_fedml/data/cifar100',
    }
    try:
        dir_path = dataset_dirs[mode]
    except KeyError:
        raise ValueError('unknown mode: %r' % (mode,)) from None
    data_path = get_data_path(dir_path, degree_of_noniid)
    print(data_path)

    if degree_of_noniid == '0':
        # A fully iid partition has no separate non-iid test split.
        test_data_mode = 'iid'
    train_data, test_data, clients = read_data(data_path, num, test_data_mode=test_data_mode)

    # train_data_dict: {0: [(batch_x, batch_y), ...], 1: [...], ...}
    train_data_dict = {}
    test_data_dict = {}
    each_client_data_num_dict = {}
    for client_idx, client in enumerate(clients):
        client_train_data = train_data[client]
        client_test_data = test_data[client]

        each_client_data_num_dict[client_idx] = (
            len(client_train_data['y']),
            len(client_test_data['y']),
        )
        train_data_dict[client_idx] = batch_data(client_train_data, batch_size=batch_size)
        test_data_dict[client_idx] = batch_data(client_test_data, batch_size=batch_size)

    total_client_num = len(clients)
    return total_client_num, train_data_dict, test_data_dict, each_client_data_num_dict