"""
不考虑缺失值
决策树 cart 算法，增加了后剪枝 post_pruning()、代价复杂度剪枝 ccp() 和打印决策树 show()
"""

import os
import numpy as np
from node import NodeClassifer, NodeRegressor


class DecisionTreeBase:
    """
    :param min_samples_split: 规定节点样例少于此值就不能分裂
    :type  min_samples_split: int
    :param min_samples_leaf: 叶子节点的样例数不能少于此值
    :type  min_samples_leaf: int
    :param min_impurity_decrease: 节点分裂的增益至少要达到此值
    :type  min_impurity_decrease: float
    :param max_leaf_nodes: 决策树的叶子节点不能超过此值
    :type  max_leaf_nodes: int
    :param max_depth: 决策树的最大深度
    :type  max_depth: int
    :param ccp_alpha: 代价复杂度剪枝参数，0.0 代表不进行代价复杂度剪枝
    :type  ccp_alpha: float
    """

    def __init__(self, min_samples_split=2,
                 min_samples_leaf=1,
                 min_impurity_decrease=0.0,
                 max_leaf_nodes=None,
                 max_depth=None,
                 ccp_alpha=0.0):
        self.min_samples_split = min_samples_split
        self.min_samples_leaf = min_samples_leaf
        self.min_impurity_decrease = min_impurity_decrease
        self.max_leaf_nodes = max_leaf_nodes
        self.max_depth = max_depth
        self.ccp_alpha = ccp_alpha
        self.eps = 1e-6
        self.root = self.get_root()

    def get_root(self):
        raise NotImplementedError

    def _fit(self, x, y):
        """训练决策树

        :param x: 训练集
        :type  x: np.ndarray
        :param y: 标签
        :type  y: np.ndarray
        """
        num_samples = x.shape[0]
        self.root.tmp['training_index']= np.arange(num_samples)

        # 设置节点类的类属性
        self.root.__class__.root_data = x
        self.root.__class__.root_target = y

        sorted_index = np.argsort(x, axis=0)
        self.root.tmp['sorted_index'] = sorted_index
        self.root.tmp['sorted_data']= np.take_along_axis(x, sorted_index, axis=0)
        self.root.tmp['sorted_target']= y[sorted_index]

        num_leaf_nodes = 0
        nodes = [self.root]
        while nodes:
            if self.max_leaf_nodes is not None and num_leaf_nodes > self.max_leaf_nodes:
                break
            node = nodes.pop(0)  # 广度优先
            #  node = nodes.pop(-1)  # 深度优先
            node.split(self.min_samples_split,
                       self.max_depth, self.min_impurity_decrease,
                       self.min_samples_leaf)
            nodes += list(node.children.values())
            if not node.children:
                num_leaf_nodes += 1

    def predict(self, x, root=None):
        """以 root 为根节点的决策树来预测测试集x

        :param x: 测试集
        :type  x: ndarray
        :param root: 根节点
        :type  root: Node
        :returns: 预测结果（1d: ndarray）
        """
        root = root if root  else self.root

        if "Classifer" in self.__class__.__name__:
            preds = np.zeros(x.shape[0], dtype=int)
        else:
            preds = np.zeros(x.shape[0])
        index = np.arange(x.shape[0])
        nodes = [(root, index)]
        while len(nodes) > 0:
            node, index = nodes.pop(0)
            node.tmp['index'] = index
            node_preds = node.predict(x[index], index)
            if node_preds['label'] is not None:
                preds[index] = node_preds['label']
            else:
                for child, index in zip(node_preds['children'], node_preds['index']):
                    nodes.append((child, index))
        return preds

    def score(self, x, y, root=None):
        raise NotImplementedError

    def get_nodes(self, root):
        """遍历以 root 为根的决策树的所有节点"""
        root = self.root if root is None else root
        nodes = [root]
        while nodes:
            node = nodes.pop(0)
            yield node
            nodes += list(node.children.values())

    def get_leaf_nodes(self, root=None):
        """遍历以 root 为根的决策树的所有叶子节点"""
        for node in self.get_nodes(root):
            if len(node.children) == 0:
                yield node

    def get_branch_nodes(self, root=None):
        """遍历以 root 为根的决策树的所有枝节点"""
        for node in self.get_nodes(root):
            if node.children:
                yield node

    def post_pruning(self, x, y):
        self.predict(x)  # 使得node的tmp包含'index'属性

        branch_nodes = list(self.get_branch_nodes())
        for node in branch_nodes[::-1]:
            i = node.tmp['index']
            data, target = x[i], y[i]
            before = self.score(data, target, node)
            tmp = node.children
            node.children = {}
            after = self.score(data, target, node)
            if after < before:
                node.children = tmp
            else:
                node.feature = None
                node.threshhold = None

    def get_alpha(self, node):
        raise NotImplementedError

    def ccp(self):
        """
        最终选取 alpha 最大的子树
        """
        if self.ccp_alpha <= 0.0:
            return

        cuted_nodes = set()
        while self.root.children:
            alpha_min = np.inf
            node_need_cut = None
            for node in self.get_branch_nodes():
                if node in cuted_nodes:
                    continue
                alpha = self.get_alpha(node)
                if alpha <= alpha_min:
                    alpha_min = alpha
                    node_need_cut = node
            if alpha_min >= self.ccp_alpha:
                break

            # 剪掉 node_need_cut
            if node_need_cut is not None:
                for node in self.get_branch_nodes(node_need_cut):
                    cuted_nodes.add(node)
                node_need_cut.cut()

    def show(self, save='decission_tree', root=None):
        """打印决策树

        :param save: 保存打印图片的文件名称
        :type  save: str
        :param root: 打印以root 为根的决策树
        :type  root: Node
        """
        from graphviz import Digraph
        from utils import get_color
        dot = Digraph(comment='决策树')

        def print_(node):
            # 递归法
            is_leaf = False if node.children else True
            shape = 'egg' if is_leaf else 'box'

            color = get_color(node.target, classes)
            node_comment, edge_comment = node.description()
            dot.attr('node', shape=shape, style='filled', fillcolor=color)
            dot.node(str(id(node)), node_comment)
            if node.father is not None:
                dot.edge(str(id(node.father)), str(id(node)), edge_comment)
            for child in node.children.values():
                print_(child)

        root = self.root if root is None else root

        if "Classifer" in self.__class__.__name__:
            classes = np.unique(root.target)
        else:
            classes = None
        print_(root)
        dot.format = 'pdf'
        dot.render(save, view=True)
        os.remove(save)


class DecisionTreeClassifier(DecisionTreeBase):
    """CART classification tree (accuracy score, misclassification-based CCP)."""

    # NOTE: the boilerplate __init__ that only forwarded *args/**kwargs to the
    # base class was removed; the inherited constructor is identical.

    def get_root(self):
        """Create a classification root node."""
        return NodeClassifer()

    def fit(self, x, y):
        """Grow the tree on ``(x, y)`` and return ``self`` (fluent API)."""
        self._fit(x, y)
        return self

    def get_alpha(self, node):
        """Effective alpha of the subtree rooted at ``node``.

        alpha = (R(t) - R(T_t)) / (|leaves(T_t)| - 1), where R counts
        misclassifications, normalized by the total training-set size.
        """
        # Misclassifications of the full subtree on the node's samples.
        RT = np.sum(self.predict(node.data, node) != node.target)
        # Misclassifications if the node were collapsed into a single leaf.
        Rt = np.sum(node.label != node.target)
        num_leaf = len(list(self.get_leaf_nodes(node)))
        alpha = (Rt - RT) / (num_leaf - 1) / self.root.data.shape[0]
        return alpha

    def score(self, x, y, root=None):
        """Accuracy of the subtree at ``root`` on ``(x, y)``."""
        preds = self.predict(x, root)
        return np.mean(preds == y)


class DecisionTreeRegressor(DecisionTreeBase):
    """CART regression tree (r2 score, squared-error-based CCP)."""

    # NOTE: the boilerplate __init__ that only forwarded *args/**kwargs to the
    # base class was removed; the inherited constructor is identical.

    def get_root(self):
        """Create a regression root node."""
        return NodeRegressor()

    def fit(self, x, y):
        """Grow the tree on ``(x, y)`` and return ``self`` (fluent API)."""
        self._fit(x, y)
        return self

    def get_alpha(self, node):
        """Effective alpha of the subtree rooted at ``node``.

        Same formula as the classifier, but R is the sum of squared errors
        (the original comment wrongly said "prediction error count").
        """
        # Squared error of the full subtree on the node's samples.
        RT = np.sum(np.square(self.predict(node.data, node) - node.target))
        # Squared error if the node were collapsed into a single leaf.
        Rt = np.sum(np.square(node.label - node.target))
        num_leaf = len(list(self.get_leaf_nodes(node)))
        alpha = (Rt - RT) / (num_leaf - 1) / self.root.data.shape[0]
        return alpha

    def score(self, x, y, root=None):
        """Coefficient of determination (r2 score) of the subtree at ``root``."""
        preds = self.predict(x, root)
        return 1 - np.sum(np.square(preds - y)) / np.sum(np.square(y - y.mean()))
