import os
import struct
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from    alive_progress import alive_bar

# Load the data
def load_mnist(path, kind='train'):
    """Load MNIST images and labels (IDX format) from `path`.

    Parameters
    ----------
    path : str
        Directory containing the IDX files.
    kind : str
        File-name prefix, 'train' or 't10k' (test set).

    Returns
    -------
    images : np.ndarray of uint8, shape (n_samples, rows*cols)
    labels : np.ndarray of uint8, shape (n_samples,)
    """
    # Build the paths with os.path.join instead of manual '/' concatenation
    # so the code works with any platform path separator.
    labels_path = os.path.join(path, kind + '-labels.idx1-ubyte')
    images_path = os.path.join(path, kind + '-images.idx3-ubyte')

    with open(labels_path, 'rb') as lbpath:
        # IDX label header: magic number, item count (big-endian uint32s).
        magic, n = struct.unpack('>II', lbpath.read(8))
        labels = np.fromfile(lbpath, dtype=np.uint8)

    with open(images_path, 'rb') as imgpath:
        # IDX image header: magic, image count, rows, cols.
        magic, num, rows, cols = struct.unpack('>IIII', imgpath.read(16))
        # Derive the flat image size from the header instead of the
        # hard-coded 784, so any square or rectangular IDX file loads.
        images = np.fromfile(imgpath, dtype=np.uint8).reshape(num, rows * cols)

    return images, labels

def visual_mnist(images, labels):
    """Show the first sample of each digit class (0-9) in a 2x5 grid."""
    fig, axes = plt.subplots(
        nrows=2,
        ncols=5,
        sharex=True,
        sharey=True, )
    axes = axes.flatten()

    for digit in range(10):
        # First training image whose label equals this digit.
        sample = images[labels == digit][0].reshape(28, 28)
        axes[digit].imshow(sample, cmap='Greys', interpolation='nearest')

    # Axes are shared, so clearing the ticks once clears them everywhere.
    axes[0].set_xticks([])
    axes[0].set_yticks([])
    plt.tight_layout()
    plt.show()

class Node(object):
    """A single node of a kd-tree."""

    def __init__(self, feature, label, deepth, dim=None, left=None, right=None):
        self.feature = feature   # feature vector of the stored sample
        self.label = label       # class label of the stored sample
        self.deepth = deepth     # depth of this node; the root has depth 0
        self.dim = dim           # dimension this node splits on (None for leaves)
        self.left = left         # left child node
        self.right = right       # right child node

    def __str__(self):
        # print(node) displays the node's class label.
        return str(self.label)

class KNeighborsTreeClassifier():  # kd-tree k-NN
    """k-nearest-neighbour classifier backed by a kd-tree.

    Split dimensions cycle through the features ordered by decreasing
    variance.  The k nearest neighbours are found by running the 1-NN
    search k times, each time excluding the nodes already found.
    """

    def __init__(self, x, y, k):
        # Feature indices sorted by decreasing variance; depth d splits
        # on var_list[d % n_features].
        self.var_list = self.__varsort(x)
        self.k = k
        self.root = self.fit(x, y)

    def fit(self, x, y, deepth=0):
        """Recursively build the kd-tree and return its root Node."""
        m, n = x.shape

        if m == 0:
            return None
        elif m == 1:
            return Node(x[0], y[0], deepth)
        else:
            dim_index = self.var_list[deepth % n]
            # Sort samples (ascending) along the split dimension and take
            # the median sample as this node; the halves form the subtrees.
            max_feat_ind_list = x[:, dim_index].argsort()
            mid_item_index = max_feat_ind_list[m // 2]

            left_child = self.fit(x[max_feat_ind_list[:m // 2]], y[max_feat_ind_list[:m // 2]], deepth + 1)
            right_child = self.fit(x[max_feat_ind_list[m // 2 + 1:]], y[max_feat_ind_list[m // 2 + 1:]], deepth + 1)

            return Node(x[mid_item_index, :], y[mid_item_index], deepth, dim=dim_index, left=left_child, right=right_child)

    def score(self, x_test, y_test):
        """Return the classification accuracy on (x_test, y_test)."""
        y_pred = []
        with alive_bar(len(y_test)) as bar:  # progress bar over the test set
            for i in range(len(y_test)):
                y_pred.append(self.pred(x_test[i, :]))
                bar()

        total_num = len(y_test)
        corre_num = np.sum((y_pred == y_test).astype(int))
        acc = corre_num / total_num

        return acc

    def pred(self, x):
        """Predict the class of a single sample x by majority vote."""
        NearestList = self.NearestKsearch(x, self.k)
        label = self.vote(NearestList)
        return label

    def cal_distance(self, feature, x):
        """Euclidean distance between two feature vectors."""
        d = np.sqrt(np.sum((feature - x) ** 2))
        return d

    def vote(self, NearestList):
        """Return the most frequent label among the given nodes.

        Ties are broken in favour of the label encountered first.
        """
        label = [x.label for x in NearestList]
        counts = {}
        for i in label:
            counts[i] = counts.get(i, 0) + 1
        sortdic = sorted(counts.items(), key=lambda d: d[1], reverse=True)
        return sortdic[0][0]

    def __varsort(self, x):
        """Return feature indices ordered by decreasing variance."""
        var_list = np.array([np.var(x[:, col]) for col in range(x.shape[1])])
        var_list = np.argsort(-var_list)

        return var_list

    def binarysearch(self, x, startNode):
        """Descend from startNode following x and return the visited path."""
        searchpath = []
        node = startNode

        while node is not None:
            searchpath.append(node)
            dim = node.dim
            if dim is None:
                # Leaf node: nothing further to descend into.
                break
            elif x[dim] < node.feature[dim]:
                node = node.left
            else:
                node = node.right

        return searchpath

    def Nearestsearch(self, x, NearestList=None):  # nearest-neighbour search
        """Return the nearest tree node to x not already in NearestList.

        BUG FIX: the original signature used a mutable default argument
        (NearestList=[]); replaced with the None-sentinel idiom.
        """
        if NearestList is None:
            NearestList = []

        searchpath = self.binarysearch(x, self.root)
        shortest_dist = float('inf')
        shortest_node = None

        while searchpath:
            node = searchpath.pop()
            distance = self.cal_distance(node.feature, x)
            if (distance < shortest_dist) and (node not in NearestList):
                shortest_dist = distance
                shortest_node = node
            dim = node.dim
            if dim is not None:
                # Cast to int before subtracting: features may be uint8
                # and unsigned subtraction would wrap around.
                vertical_distance = np.abs(int(node.feature[dim]) - int(x[dim]))
                # NOTE(review): prunes against the distance to the current
                # node rather than the best distance found so far — more
                # conservative (explores more subtrees) than classic
                # kd-tree pruning.  Kept as-is to preserve behaviour.
                if vertical_distance < distance or (node in NearestList):
                    if node.feature[dim] <= x[dim]:
                        searchpath += self.binarysearch(x, node.left)
                    elif node.feature[dim] > x[dim]:
                        searchpath += self.binarysearch(x, node.right)
            else:
                continue

        return shortest_node

    def NearestKsearch(self, x, k):  # k-nearest-neighbour search
        """Collect the k nearest nodes by repeated exclusion search."""
        NearestList = []
        for _ in range(k):
            NearestList.append(self.Nearestsearch(x, NearestList))
        return NearestList

class KNeighborsLinerClassifier():  # brute-force (linear scan) k-NN
    """k-nearest-neighbour classifier using an exhaustive distance scan."""

    def __init__(self, x, y, k):
        self.fit(x, y)
        self.k = k

    def fit(self, x, y):
        """Memorise the training set; k-NN has no real training phase."""
        self.x = x
        self.y = y

    def score(self, x_test, y_test):
        """Return the classification accuracy on (x_test, y_test)."""
        y_pred = []
        with alive_bar(len(y_test)) as bar:  # declare your expected total
            for row in range(len(y_test)):
                y_pred.append(self.pred(x_test[row, :]))
                bar()

        correct = np.sum((y_pred == y_test).astype(int))
        return correct / len(y_test)

    def pred(self, x):
        """Predict the class of one sample via a full distance scan."""
        distances = self.cal_distance(self.x, x)
        nearest = np.argsort(distances)[:self.k]
        return self.vote(self.y[nearest])

    def cal_distance(self, feature, x):
        # Squared Euclidean distance to every training row; the square
        # root is omitted since it does not change the ranking.
        return np.sum((feature - x) ** 2, axis=1)

    def vote(self, y):
        """Return the most frequent label in y (first seen wins ties)."""
        counts = {}
        for label in y:
            counts[label] = counts.get(label, 0) + 1
        return max(counts, key=counts.get)


# Main function
def main():
    """Load MNIST, build both k-NN classifiers and report their accuracy."""

    # Load the data and print basic shape/type information.
    images_train, labels_train = load_mnist('../Datasets/mnist')
    print('Images_train Shape:{:<25s} Labels_train Shape:{:<25s}'
            .format(str(images_train.shape), str(labels_train.shape)))
    print('Images_train type :{:<25s} Labels_train type :{:<25s}'
            .format(str(type(images_train)), str(type(labels_train))))
    images_test, labels_test = load_mnist('../Datasets/mnist', 't10k')
    print('Images_test Shape :{:<25s}  Labels_test Shape:{:<25s}'
            .format(str(images_test.shape), str(labels_test.shape)))
    print('Images_test type  :{:<25s}  Labels_test type :{:<25s}'
            .format(str(type(images_test)), str(type(labels_test))))

    # Build the models.
    k = 1
    tree = KNeighborsTreeClassifier(images_train, labels_train, k)
    lknn = KNeighborsLinerClassifier(images_train, labels_train, k)

    # Evaluate.
    # BUG FIX: the original printed the kd-tree score under the
    # "Liner Serach" label and vice versa; labels are now matched to
    # the model actually scored (and the "Serach" typo is fixed).
    print('   KDTree    ACC:', tree.score(images_test, labels_test))
    print('Linear Search ACC:', lknn.score(images_test, labels_test))

# Run the demo only when executed as a script, not on import.
if __name__ == '__main__':
    main()