import sys
sys.path.append('..')
import iris_data
import numpy as np
from sklearn.model_selection import train_test_split

class Node():
    """
    Node of KDTree.

    Attributes:
        father: parent Node (None for the root)
        left / right: child Nodes (None for a leaf)
        value: tuple (data, label) stored at this node
        feature: index of the feature this node splits on
        brother: sibling Node, if the caller tracks it
    """
    def __init__(self, father=None, left=None, 
                 right=None, value=None, feature=None, brother=None):
        self.father = father
        self.left = left
        self.right = right
        #value format (data, label)
        self.value = value
        #which feature we use to split
        self.feature = feature
        #BUG FIX: `brother` was accepted but never stored, so any
        #caller passing it silently lost the reference
        self.brother = brother


class KD_Tree():
    """
    kd tree built from samples whose LAST column is the class label.
    """

    def _choose_feature(self, X):
        """
        Pick the splitting dimension: the feature with the largest
        variance (the trailing label column is excluded).
        """
        # per-column variance; drop the label column at the end
        spread = np.var(X, axis=0)[:-1]
        return np.argmax(spread)

    def _split_feature(self, X, feature_id):
        """
        Sort the samples along `feature_id` and cut at the median row.
        Returns (left_half, right_half, median_row); the median row
        itself belongs to neither half.
        """
        order = np.argsort(X[:, feature_id])
        X = X[order]
        mid = len(X) // 2
        return X[:mid], X[mid + 1:], X[mid]

    def build_tree(self, X, father=None):
        """
        Recursively build the kd tree over sample matrix `X`.
        Returns the subtree's root Node, or None when `X` is empty.
        """
        # base case: nothing left to place
        if len(X) == 0:
            return None

        node = Node()
        node.father = father
        axis = self._choose_feature(X)
        left, right, median = self._split_feature(X, axis)
        node.feature = axis
        # split the median row into (feature vector, label)
        node.value = (median[:-1], median[-1])
        node.left = self.build_tree(left, node)
        node.right = self.build_tree(right, node)
        return node

class KNN():
    """
    KNN classifier based on a kd tree.
    """

    def _lp_distance(self, x1, x2, p=2):
        """
        Minkowski (L_p) distance between two samples; p=2 (the
        default) is the Euclidean distance.
        """
        s = np.sum(np.power(abs(x1 - x2), p))
        return np.power(s, 1 / p)

    def search(self, target, root, k):
        """
        Search the kd tree for the k nearest neighbors of `target`.

        Descend to a leaf first, then backtrack. For every node:
            1. try to insert the node into the sorted neighbor list
            2. check whether the hypersphere around `target` (radius =
               current worst kept distance) intersects the split plane:
                a. if it does -- or the list is not full yet -- search
                   the sibling subtree as well
                b. otherwise return to the father node

        Returns an array of (distance, label) rows sorted ascending by
        distance; unfilled slots keep the sentinel distance -1.
        """
        #neighbor elements (distance, label); -1 marks an empty slot
        neighbor = [(-1, None) for i in range(k)]
        self.neighbor = np.array(neighbor)

        def recurve(node):
            if not node:
                return
            #the feature used to split at this node
            feature_id = node.feature
            #signed gap between target value and this node's split plane
            delta = target[feature_id] - node.value[0][feature_id]
            #descend the side of the plane the target falls on
            if delta > 0:
                recurve(node.right)
            else:
                recurve(node.left)

            #1. update the nearest-neighbor list (kept sorted ascending)
            distance = self._lp_distance(target, node.value[0])
            for i, v in enumerate(self.neighbor):
                #insert before the first slot that is empty or worse
                if v[0] == -1 or distance < v[0]:
                    self.neighbor = np.insert(
                        self.neighbor, i, [distance, node.value[1]], axis=0)
                    #drop the current worst to keep exactly k slots
                    self.neighbor = self.neighbor[:-1]
                    break

            #2. check if the hypersphere intersects the split plane
            n = list(self.neighbor[:, 0]).count(-1)  #empty slots left
            #worst distance among the filled slots
            max_distance = self.neighbor[-n - 1, 0]
            #BUG FIX: while the list is not full (n > 0) the sibling
            #subtree MUST be searched unconditionally -- the old test
            #`max_distance > abs(delta)` could wrongly prune it and
            #leave None labels in the result.
            if n > 0 or max_distance > abs(delta):
                # delta > 0 means we already visited right, now go left
                if delta > 0:
                    recurve(node.left)
                else:
                    recurve(node.right)

        recurve(root)
        return self.neighbor

    def predict(self, target, root, k):
        """
        Predict the label of `target` by majority vote among its k
        nearest neighbors (ties go to the label seen first).
        """
        knn = self.search(target, root, k)
        #skip unfilled sentinel slots (tree may hold fewer than k points)
        label = [v[1] for v in knn if v[0] != -1]
        return max(label, key=label.count)
            
    

if __name__ == '__main__':
    # load the iris data set and hold out 25% for evaluation
    X, y = iris_data.get_data()
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=.25, random_state=40)

    # stack features and labels so each tree node stores (data, label)
    training_set = np.concatenate((X_train, y_train), axis=1)
    root = KD_Tree().build_tree(training_set)

    # classify every test sample with k = 6 neighbors
    predictions = [KNN().predict(sample, root, 6) for sample in X_test]

    predictions = np.array(predictions).reshape(y_test.shape)
    error_rate = (predictions != y_test).sum() / len(y_test)
    print(error_rate)