import numpy as np
import struct

def unpickle(file):
    """Load one pickled CIFAR-10 batch file and return its dict.

    The CIFAR-10 python batches were pickled under Python 2, so
    ``encoding='bytes'`` is required and the returned dict is keyed by
    bytes (e.g. ``b"data"``, ``b"labels"``).

    NOTE(review): ``pickle.load`` executes arbitrary code from the file —
    only call this on trusted dataset files.

    Args:
        file: path to the pickled batch file.

    Returns:
        The unpickled dictionary.
    """
    import pickle
    with open(file, 'rb') as fo:
        # renamed from `dict` — don't shadow the builtin
        batch = pickle.load(fo, encoding='bytes')
    return batch

def Load_CIFAR10_Data(num_training=49000, num_validation=1000, num_test=1000,
                      path="C:\\Data\\cifar-10-python\\cifar-10-batches-py\\"):
    """Load CIFAR-10 from the pickled python batches and split it.

    Args:
        num_training:   number of rows kept for the training split.
        num_validation: number of rows (immediately after the training rows)
                        kept for the validation split.
        num_test:       number of rows kept from the test batch.
        path:           directory (with trailing separator) containing the
                        ``data_batch_*`` and ``test_batch`` files.
                        Defaults to the original hard-coded location.

    Returns:
        (x_Train, y_Train, x_Val, y_Val, x_Test, y_Test). The x arrays are
        mean-centered (training-split mean image subtracted, so float), the
        y arrays are the corresponding label vectors.
    """
    xs = []
    ys = []

    # The five training batches; each dict carries a 'data' array and a
    # 'labels' list under bytes keys (batches were pickled by Python 2).
    for b in range(1, 6):
        batch = unpickle(path + "data_batch_" + str(b))
        xs.append(batch[b"data"])
        ys.append(batch[b"labels"])

    x = np.concatenate(xs)
    y = np.concatenate(ys)

    # Test batch — asarray turns the raw data/labels into ndarrays
    # (equivalent to the original single-element np.concatenate trick).
    batch = unpickle(path + "test_batch")
    x2 = np.asarray(batch[b"data"])
    y2 = np.asarray(batch[b"labels"])

    # Train / validation / test splits (plain slices instead of index lists).
    x_Train = x[:num_training]
    y_Train = y[:num_training]
    x_Val = x[num_training:num_training + num_validation]
    y_Val = y[num_training:num_training + num_validation]
    x_Test = x2[:num_test]
    y_Test = y2[:num_test]

    # Normalization: subtract the mean image of the TRAINING split from
    # every split, so validation/test are centered with training statistics.
    mean_image = np.mean(x_Train, axis=0)
    x_Train = x_Train - mean_image
    x_Val = x_Val - mean_image
    x_Test = x_Test - mean_image

    return x_Train, y_Train, x_Val, y_Val, x_Test, y_Test


def Load_MNIST_Data(path=""):
    """Load the MNIST training images.

    Args:
        path: directory prefix for the MNIST files (defaults to the current
              working directory, matching the original behavior).

    Returns:
        Whatever Load_MNIST_File returns for 'train-images.idx3-ubyte'.
        (The original discarded this result and left a dead local variable.)
    """
    return Load_MNIST_File(path + 'train-images.idx3-ubyte')


def Load_MNIST_File(pathfileName):
    """Parse an IDX3 image file (MNIST format) and return the images.

    IDX3 layout: a big-endian header of four uint32s (magic, image count,
    rows, columns) followed by count*rows*cols unsigned pixel bytes.

    Fixes over the original: the file handle is closed via ``with``, the
    image size comes from the header instead of a hard-coded 784, and the
    parsed images are actually returned instead of discarded.

    Args:
        pathfileName: path to the .idx3-ubyte file.

    Returns:
        np.ndarray of shape (numImages, numRows * numColumns), dtype uint8.
    """
    with open(pathfileName, 'rb') as binfile:
        buf = binfile.read()

    header_fmt = '>IIII'
    magic, numImages, numRows, numColumns = struct.unpack_from(header_fmt, buf, 0)
    offset = struct.calcsize(header_fmt)

    pixels = numRows * numColumns
    # Bulk-decode the pixel payload in one pass; .copy() detaches the
    # result from the read buffer.
    images = np.frombuffer(buf, dtype=np.uint8,
                           count=numImages * pixels, offset=offset)
    return images.reshape(numImages, pixels).copy()

def GenerateGaussData(mean=(2, 2), sd=(1, 1), num=100):
    """Draw *num* samples from an axis-aligned (diagonal) Gaussian.

    Defaults changed from lists to tuples to avoid the mutable-default-
    argument pitfall; call sites are unaffected.

    Args:
        mean: per-dimension means; its length defines the dimensionality.
        sd:   per-dimension standard deviations (same length as mean).
        num:  number of samples to draw.

    Returns:
        A list of *num* points, each a list of len(mean) floats.
    """
    dim = len(mean)
    data = []
    for _ in range(num):
        # One np.random.normal call per coordinate keeps the draw order
        # identical to the original implementation (seed-compatible).
        data.append([np.random.normal(mean[i], sd[i]) for i in range(dim)])
    return data

#---Generate PlayGround Data---
def Generate_Playground_Data():
    """Build a two-class 2-D toy dataset.

    100 Gaussian points around (2, 2) get one-hot label [1, 0] and 100
    points around (-2, -2) get [0, 1].

    Returns:
        (data, labels): parallel lists of length 200.
    """
    positives = GenerateGaussData([2, 2], [1, 1], 100)
    negatives = GenerateGaussData([-2, -2], [1, 1], 100)

    data = positives + negatives
    labels = [[1, 0] for _ in range(100)] + [[0, 1] for _ in range(100)]

    return data, labels

def Toy_Data(num_Data=100, num_Dim=10, num_Class=3):
    """Generate a deterministic random toy classification dataset.

    ``np.random.seed(0)`` is set inside, so repeated calls with the same
    arguments return identical data.

    Args:
        num_Data:  number of samples.
        num_Dim:   feature dimensionality.
        num_Class: number of label classes.

    Returns:
        X: (num_Data, num_Dim) float array with values in [0, 1).
        y: (num_Data,) int array with labels in [0, num_Class).
    """
    np.random.seed(0)
    X = np.random.random((num_Data, num_Dim))
    # size=num_Data consumes the same RandomState draws as the original
    # (num_Data, 1)-shaped call followed by a column slice — same values,
    # just without the needless intermediate array.
    y = np.random.randint(num_Class, size=num_Data)
    return X, y