import os
from collections import Counter

import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns

# Data loading
def load_iris(path="../Datasets/iris/Iris.csv"):
    """Load the iris dataset from a CSV file.

    The CSV is expected in the Kaggle ``Iris.csv`` layout: an Id-like
    first column, then the feature columns, with the species label as
    the last column.

    Args:
        path: Location of the CSV file. Defaults to the original
            hard-coded path so existing callers are unaffected.

    Returns:
        Tuple ``(x, y)`` of numpy arrays: ``x`` holds every column
        except the first and last (the features), ``y`` the last
        column (the labels).
    """
    iris = pd.read_csv(path)
    data = np.array(iris)
    # Drop the leading Id column; the final column is the species label.
    x, y = data[:, 1:-1], data[:, -1]

    return x, y

# Node definition
class Node(object):
    """One kd-tree node: a stored sample plus its split bookkeeping."""

    def __init__(self, feature, label, deepth, dim=None, left=None, right=None):
        self.feature = feature              # the sample held at this node
        self.label = label                  # the sample's class label
        self.deepth = deepth                # depth in the tree; the root sits at depth 0
        self.dim = dim                      # dimension used for the split (None for leaves)
        self.left, self.right = left, right # child subtrees

    def __str__(self):
        # print(node) shows the node's label.
        return str(self.label)

class KNeighborsTreeClassifier(): # kd-tree backed k-nearest-neighbour classifier
    """KNN classifier that stores the training samples in a kd-tree.

    The split dimension cycles, by depth, through the features ordered by
    decreasing variance (computed once on the full training set).
    """

    def __init__(self, x, y, k):
        # Feature indices sorted by descending variance; depth d splits
        # on self.var_list[d % n_features].
        self.var_list = self.__varsort(x)
        self.k = k
        self.root = self.fit(x, y)

    def fit(self, x, y, deepth=0):
        """Recursively build the kd-tree and return the subtree's root Node.

        Samples are sorted along this depth's split dimension; the median
        sample becomes the node and the two halves form the subtrees.
        """
        m, n = x.shape

        if   m == 0:
            return None
        elif m == 1:
            # A single sample becomes a leaf (its dim stays None).
            return Node(x[0], y[0], deepth)
        else:
            dim_index = self.var_list[ deepth % n ]
            # Sort samples along the split feature and take the median one.
            sort_index = x[:, dim_index].argsort()
            mid_item_index = sort_index[m // 2]

            left_child  = self.fit(x[sort_index[:m // 2]],     y[sort_index[:m // 2]],     deepth + 1)
            right_child = self.fit(x[sort_index[m // 2 + 1:]], y[sort_index[m // 2 + 1:]], deepth + 1)

            return Node(x[mid_item_index, :], y[mid_item_index], deepth,
                        dim=dim_index, left=left_child, right=right_child)

    def score(self, x_test, y_test):
        """Return classification accuracy on the given test set."""
        y_pred = [self.pred(x_test[i, :]) for i in range(len(y_test))]

        total_num = len(y_test)
        corre_num = np.sum((y_pred == y_test).astype(int))
        return corre_num / total_num

    def pred(self, x):
        """Predict one sample's label by majority vote of its k nearest nodes."""
        NearestList = self.NearestKsearch(x, self.k)
        return self.vote(NearestList)

    def cal_distance(self, feature, x):
        # Euclidean distance between a stored sample and the query point.
        return np.sqrt(np.sum((feature - x) ** 2))

    def vote(self, NearestList):
        """Return the most common label among the nodes (ties: first seen wins)."""
        labels = [node.label for node in NearestList]
        # Counter.most_common sorts by count and is stable, matching the
        # original hand-rolled dict + stable-sort behaviour on ties.
        return Counter(labels).most_common(1)[0][0]

    def __varsort(self, x):
        """Return the feature indices ordered by decreasing variance."""
        var_list = np.array([np.var(x[:, col]) for col in range(x.shape[1])])
        return np.argsort(-var_list)

    def binarysearch(self, x, startNode):
        """Descend from startNode as in a binary search and return the path.

        Returns the list of visited nodes, root-first (so .pop() walks the
        path back up towards startNode).
        """
        searchpath = []
        node = startNode

        # BUG FIX: compare to None by identity, not '!=' / '=='.
        while node is not None:
            searchpath.append(node)
            dim = node.dim
            if dim is None:
                # Leaf node: no split dimension, stop descending.
                break
            elif x[dim] < node.feature[dim]:
                node = node.left
            elif x[dim] >= node.feature[dim]:
                node = node.right

        return searchpath

    def Nearestsearch(self, x, NearestList=None): # nearest-neighbour search
        """Return the nearest node to x that is not already in NearestList.

        Unwinds the binary-search path; whenever the splitting hyperplane
        is closer than the current node (or the node is excluded), the far
        side of the split is searched as well.
        """
        # BUG FIX: the original default was a mutable '[]' shared between
        # calls; create a fresh list per call instead.
        if NearestList is None:
            NearestList = []

        searchpath = self.binarysearch(x, self.root)
        shortest_dist = float('inf')
        shortest_node = None

        while searchpath:
            node = searchpath.pop()
            distance = self.cal_distance(node.feature, x)
            if (distance < shortest_dist) and (node not in NearestList):
                shortest_dist = distance
                shortest_node = node
            dim = node.dim
            if dim is not None:
                # Distance from the query point to the splitting hyperplane.
                vertical_distance = np.abs(node.feature[dim] - x[dim])
                if vertical_distance < distance or (node in NearestList):
                    # The other side of the split may hold a closer sample.
                    if node.feature[dim] <= x[dim]:
                        searchpath += self.binarysearch(x, node.left)
                    elif node.feature[dim] > x[dim]:
                        searchpath += self.binarysearch(x, node.right)
            else:
                continue

        return shortest_node

    def NearestKsearch(self, x, k): # k-nearest-neighbour search
        """Collect the k nearest nodes by repeated exclusive nearest searches."""
        NearestList = []
        for _ in range(k):
            NearestList.append(self.Nearestsearch(x, NearestList))
        return NearestList

class KNeighborsLinerClassifier(): # brute-force (linear scan) k-nearest-neighbour classifier
    """KNN classifier that predicts by scanning every training sample."""

    def __init__(self, x, y, k):
        self.fit(x, y)
        self.k = k

    def fit(self, x, y):
        """Memorise the training data; KNN has no real training step."""
        self.x = x
        self.y = y

    def score(self, x_test, y_test):
        """Return classification accuracy on the given test set."""
        y_pred = [self.pred(x_test[i, :]) for i in range(len(y_test))]

        total_num = len(y_test)
        corre_num = np.sum((y_pred == y_test).astype(int))
        return corre_num / total_num

    def pred(self, x):
        """Predict one sample's label by majority vote of the k nearest samples."""
        d = self.cal_distance(self.x, x)
        # Training-sample indices ordered by increasing distance.
        order = np.argsort(d)
        return self.vote(self.y[order[:self.k]])

    def cal_distance(self, feature, x):
        # Squared Euclidean distance: the square root is skipped on
        # purpose, since it does not change the neighbour ranking.
        return np.sum((feature - x) ** 2, axis=1)

    def vote(self, y):
        """Return the most frequent label (ties go to the first seen)."""
        # Counter.most_common sorts by count and is stable, matching the
        # original hand-rolled dict + stable-sort behaviour on ties.
        return Counter(y).most_common(1)[0][0]

# Entry point
def main():
    """Load the iris data, train both KNN variants and report accuracy."""

    # Load the data and print basic shape/type information.
    x,y = load_iris()
    print('x Shape:{:<25s} y Shape:{:<25s}'.format(str(x.shape),str(y.shape)))
    print('x type :{:<25s} y type :{:<25s}'.format(str(type(x)),str(type(y))))

    # Train/test split: per class, the first 40 samples train and the
    # last 10 test (classes occupy rows 0-49, 50-99, 100-149).
    x_train = np.concatenate((x[:40,:],  x[50:90,:], x[100:140,:]), axis=0)
    y_train = np.concatenate((y[:40],    y[50:90],   y[100:140]  ), axis=0)
    x_test  = np.concatenate((x[40:50,:],x[90:100,:],x[140:150,:]), axis=0)
    y_test  = np.concatenate((y[40:50],  y[90:100],  y[140:150]  ), axis=0)

    # Build the models.
    k = 1
    tree = KNeighborsTreeClassifier(x_train,y_train,k)
    knn = KNeighborsLinerClassifier(x_train,y_train,k)

    # Evaluate. BUG FIX: the kd-tree was previously scored on the
    # TRAINING set, inflating its reported accuracy; both models are
    # now evaluated on the held-out test set. Also fixed the "Serach"
    # typo in the printed label.
    print('Liner Search ACC:',knn.score(x_test,y_test))
    print('   KDTree    ACC:',tree.score(x_test,y_test))

if __name__ == '__main__':
    main()