# Build a KD tree and use it for search. Construction is the critical step:
# building (balanced, via median splits) differs from plain insertion — adding
# points one by one without rebalancing yields a suboptimal tree structure.
import numpy
class Node:
    """A single KD-tree node: one stored point plus the axis it splits on."""

    def __init__(self, data=None, split_index=0):
        # Point held at this node (e.g. a numpy row); None for an empty node.
        self.data = data
        # Coordinate index used to partition points at this node.
        self.split_index = split_index
        # Child links; populated later by the tree builder.
        self.left_child = None
        self.right_child = None

class KDTree:
    """A balanced KD tree built by median splits, supporting nearest-neighbour
    and k-nearest-neighbour search.

    Building the whole tree up front differs from inserting points one by
    one: insertion without rebalancing would not give an optimal structure.
    """

    def __init__(self, data):
        """Build the tree from ``data``, an (n, k) array-like of n points."""
        self.root = None
        data = numpy.asarray(data)
        # Bug fix: ``data.ndim`` is the array rank (always 2 for an n-by-k
        # point matrix), not the point dimension.  The split axis must cycle
        # over the k coordinates, so use the row length instead.
        self.dim = data.shape[1] if data.ndim > 1 else 1
        self.create_tree(data, order=0)  # recursive construction
        self.best_distance = numpy.inf  # best distance found so far
        self.best_Node = None  # node achieving best_distance
        self.current_Node = self.root
        self.visited = []  # nodes already examined during a search
        self.k_nearests = []  # current k best nodes, sorted nearest-first

    @staticmethod
    def distance(data1, data2):
        """Euclidean distance between two points."""
        return numpy.linalg.norm(data1 - data2)

    def is_empty(self):
        """True while no root node has been set."""
        return not self.root

    def append(self, curr_node, point):
        """Add ``curr_node`` to the k-best list, keeping it sorted by
        distance to ``point`` (nearest first)."""
        self.k_nearests.append(curr_node)
        self.k_nearests.sort(key=lambda x: self.distance(x.data, point))

    def create_tree(self, data, order=0):
        """Recursively build the subtree for ``data``.

        ``order`` selects the split axis (cycled modulo the point
        dimension).  Returns the subtree root, or None for empty input.
        """
        if len(data) < 1:
            return None
        # Sort along the current axis and split at the median point.
        data = sorted(data, key=lambda x: x[order % self.dim])
        idx = len(data) // 2
        node = Node(data[idx], order % self.dim)
        left_data = data[:idx]
        right_data = data[idx + 1:]
        if self.is_empty():
            self.root = node  # first node created is the overall root
        node.left_child = self.create_tree(left_data, order + 1)  # left subtree
        node.right_child = self.create_tree(right_data, order + 1)  # right subtree
        return node

    def NearestNodeSearch(self, data, current_Node):
        """Recursively find the nearest neighbour of query point ``data``.

        The result is left in ``best_Node``/``best_distance``.  ``visited``
        is only cleared in ``__init__``, so one query per tree instance is
        assumed (or reset the search state manually between queries).
        """
        if current_Node is None:
            return
        print(current_Node.data)
        self.visited.append(current_Node)
        distance = KDTree.distance(data, current_Node.data)
        if distance < self.best_distance:
            self.best_distance = distance
            self.best_Node = current_Node
        # Descend to the side of the split plane containing the query.
        if data[current_Node.split_index] < current_Node.data[current_Node.split_index]:
            self.NearestNodeSearch(data, current_Node.left_child)
        else:
            self.NearestNodeSearch(data, current_Node.right_child)
        # Backtrack: if the ball of radius best_distance around the query
        # crosses this node's split plane, the other subtree may hold a
        # closer point.  (Bug fix: the original chose ``left_child`` whenever
        # it was not in ``visited``; when the descent hit a None child, the
        # unvisited sibling subtree was silently skipped.)
        if numpy.abs(current_Node.data[current_Node.split_index] - data[current_Node.split_index]) < self.best_distance:
            for child in (current_Node.left_child, current_Node.right_child):
                if child is not None and child not in self.visited:
                    self.NearestNodeSearch(data, child)

    def k_nearest_node_search(self, data, current_Node, k):
        """Recursively collect the ``k`` nearest neighbours of ``data`` into
        ``self.k_nearests`` (kept sorted nearest-first)."""
        if current_Node is None:
            return None
        self.visited.append(current_Node)
        if len(self.k_nearests) < k:
            # Fewer than k candidates so far: keep unconditionally.
            self.append(current_Node, data)
        elif self.distance(current_Node.data, data) < self.distance(data, self.k_nearests[-1].data):
            # Closer than the current worst candidate: replace it.
            self.k_nearests.pop()
            self.append(current_Node, data)
        # Descend to the query's side of the split plane.
        if data[current_Node.split_index] < current_Node.data[current_Node.split_index]:
            self.k_nearest_node_search(data, current_Node.left_child, k)
        else:
            self.k_nearest_node_search(data, current_Node.right_child, k)
        # Backtrack into the other side when it could still contribute:
        # either we lack k candidates, or the split plane is closer than the
        # current worst candidate.  (Same None-child bug fix as in
        # ``NearestNodeSearch``.)
        if len(self.k_nearests) < k or numpy.abs(current_Node.data[current_Node.split_index] - data[current_Node.split_index]) < self.distance(data, self.k_nearests[-1].data):
            for child in (current_Node.left_child, current_Node.right_child):
                if child is not None and child not in self.visited:
                    self.k_nearest_node_search(data, child, k)
# Sample 2-D points for the demonstration.
data = numpy.array(
    [[2, 5], [1, 4], [3, 3], [6, 5], [10, 2.], [7, 3], [8, 13], [8, 9], [1, 2]]
)
kdtree = KDTree(data)
# Nearest-neighbour lookup via the KD tree (prints each node visited).
query = numpy.array([10, 3])
kdtree.NearestNodeSearch(query, kdtree.root)