import math

import matplotlib.pyplot as plt
import numpy as np
from matplotlib.font_manager import FontProperties

from src.utils.DecisionTreeUtils import DecisionTreeUtils

def getNumLeafs(myTree):
    """Count the leaf (terminal) nodes of a nested-dict decision tree.

    A tree is {featureName: {featureValue: subtree-or-classLabel}};
    any non-dict child is a leaf.
    """
    root = next(iter(myTree))
    return sum(
        getNumLeafs(child) if isinstance(child, dict) else 1
        for child in myTree[root].values()
    )


def getTreeDepth(myTree):
    """Return the depth of a nested-dict decision tree.

    Depth counts split levels below the root key: a tree whose children
    are all class labels has depth 1.
    """
    root = next(iter(myTree))
    branch_depths = [
        1 + getTreeDepth(child) if isinstance(child, dict) else 1
        for child in myTree[root].values()
    ]
    return max(branch_depths, default=0)

def plotNode(nodeTxt, centerPt, parentPt, nodeType):
    """Draw one tree node at `centerPt` with an arrow coming from `parentPt`.

    `nodeType` is a bbox style dict (decision vs. leaf look); coordinates
    are in axes-fraction units on the axes created by createPlot().
    """
    createPlot.ax1.annotate(
        nodeTxt,
        xy=parentPt, xycoords='axes fraction',
        xytext=centerPt, textcoords='axes fraction',
        va="center", ha="center",
        bbox=nodeType,
        arrowprops=dict(arrowstyle="<-"),
    )


# Edge-label helper: writes the split value midway along a parent-child edge
# (the original comment "prediction function" was mislabeled).
def plotMidText(cntrPt, parentPt, txtString):
    """Write `txtString` at the midpoint of the edge from `parentPt` to `cntrPt`."""
    midX = (parentPt[0] - cntrPt[0]) / 2.0 + cntrPt[0]
    midY = (parentPt[1] - cntrPt[1]) / 2.0 + cntrPt[1]
    createPlot.ax1.text(midX, midY, txtString, va="center", ha="center", rotation=30)


def plotTree(myTree, parentPt, nodeTxt):
    """Recursively draw the subtree `myTree`, hanging from `parentPt`.

    `nodeTxt` is the edge label (feature value) written between the parent
    and this node.  Relies on function attributes initialised by
    createPlot(): plotTree.totalW / plotTree.totalD (leaf count / depth of
    the whole tree, used as horizontal / vertical scale) and the running
    drawing cursor plotTree.xOff / plotTree.yOff, which this function
    mutates as it walks the tree.
    """
    decisionNode = dict(boxstyle="sawtooth", fc="0.8")   # bbox style for internal (decision) nodes
    leafNode = dict(boxstyle="round4", fc="0.8")         # bbox style for leaf nodes
    numLeafs = getNumLeafs(myTree)   # width of this subtree, in leaf slots
    depth = getTreeDepth(myTree)     # NOTE(review): computed but never used below
    firstStr = next(iter(myTree))    # feature label at the root of this subtree
    # Center this decision node horizontally above the leaf slots it spans.
    cntrPt = (plotTree.xOff + (1.0 + float(numLeafs))/2.0/plotTree.totalW, plotTree.yOff)
    plotMidText(cntrPt, parentPt, nodeTxt)       # label on the incoming edge
    plotNode(firstStr, cntrPt, parentPt, decisionNode)
    secondDict = myTree[firstStr]
    plotTree.yOff = plotTree.yOff - 1.0/plotTree.totalD   # descend one level before drawing children
    for key in secondDict.keys():
        if type(secondDict[key]).__name__=='dict':
            # Internal node: recurse; this node becomes the parent anchor.
            plotTree(secondDict[key],cntrPt,str(key))
        else:
            # Leaf: advance the x cursor by one leaf slot and draw it.
            plotTree.xOff = plotTree.xOff + 1.0/plotTree.totalW
            plotNode(secondDict[key], (plotTree.xOff, plotTree.yOff), cntrPt, leafNode)
            plotMidText((plotTree.xOff, plotTree.yOff), cntrPt, str(key))
    plotTree.yOff = plotTree.yOff + 1.0/plotTree.totalD   # back up one level on the way out


def createPlot(inTree):
    """Create a figure, render the decision tree `inTree`, and show it."""
    fig = plt.figure(1, facecolor='white')
    fig.clf()  # figure 1 is reused: wipe anything drawn previously
    # Single borderless subplot with both axes hidden; shared with the
    # drawing helpers via the createPlot.ax1 function attribute.
    createPlot.ax1 = plt.subplot(111, frameon=False, xticks=[], yticks=[])
    # Layout state consumed by plotTree() through function attributes.
    plotTree.totalW = float(getNumLeafs(inTree))   # horizontal scale: one slot per leaf
    plotTree.totalD = float(getTreeDepth(inTree))  # vertical scale: one band per level
    plotTree.xOff = -0.5 / plotTree.totalW         # x cursor starts half a slot left of origin
    plotTree.yOff = 1.0                            # y cursor starts at the top
    plotTree(inTree, (0.5, 1.0), '')               # root hangs from top-center
    plt.show()

# 3. Recursive tree-building algorithm (skeleton)
def create_tree(dataset, labels, featLabels):
    """Recursively build an ID3-style decision tree.

    Args:
        dataset: list of examples; the last element of each example is the
            class label, the rest are feature values aligned with `labels`.
        labels: feature-name list; mutated in place (the chosen feature's
            name is removed at each split level).
        featLabels: out-parameter; chosen feature names are appended in the
            order they are used.

    Returns:
        A nested dict {featureName: {featureValue: subtree-or-classLabel}},
        or a bare class label for a pure / feature-exhausted branch.
    """
    classList = [example[-1] for example in dataset]
    # Base case 1: every remaining example has the same class.
    if classList.count(classList[0]) == len(classList):
        return classList[0]
    # Base case 2 (was missing): no features left to split on -> majority vote.
    if len(dataset[0]) == 1:
        return max(classList, key=classList.count)
    bestFeat = choose_best_feature(dataset, labels)   # column index of the best feature
    # Fix: was featLabels[bestFeat] — featLabels starts empty, so that raised IndexError.
    bestFeatLabel = labels[bestFeat]
    del labels[bestFeat]            # consume the feature name at this level
    featLabels.append(bestFeatLabel)
    tree = {bestFeatLabel: {}}
    # Fix: iterate each distinct value once (the original revisited duplicates,
    # rebuilding the same branch repeatedly).
    for featValue in set(example[bestFeat] for example in dataset):
        # `labels` no longer contains bestFeatLabel, so a plain copy suffices
        # (the original's subLabels.remove(bestFeatLabel) raised ValueError).
        subLabels = labels[:]
        subDataset = split_dataset(dataset, bestFeat, featValue)
        tree[bestFeatLabel][featValue] = create_tree(subDataset, subLabels, featLabels)
    return tree




# 4. Choose the best feature to split on
def choose_best_feature(dataset, labels):
    """Return the INDEX of the feature with the highest information gain (ID3).

    The original stub returned labels[0] — a feature *name* — while every
    caller in this file uses the result as a column index
    (``del labels[bestFeat]``, ``example[bestFeat]``), so it could never
    work.  This computes standard Shannon-entropy information gain.

    Args:
        dataset: list of examples; class label in the last position.
        labels: feature-name list (unused; kept for interface compatibility).

    Returns:
        int: index of the best feature (0 when no split reduces entropy).
    """
    def _entropy(rows):
        # Shannon entropy of the class-label distribution in `rows`.
        total = len(rows)
        counts = {}
        for row in rows:
            counts[row[-1]] = counts.get(row[-1], 0) + 1
        return -sum((c / total) * math.log2(c / total) for c in counts.values())

    baseEntropy = _entropy(dataset)
    bestGain, bestFeat = 0.0, 0
    for i in range(len(dataset[0]) - 1):
        # Weighted entropy of the partition induced by feature i.
        newEntropy = 0.0
        for value in set(example[i] for example in dataset):
            subset = [ex for ex in dataset if ex[i] == value]
            newEntropy += len(subset) / len(dataset) * _entropy(subset)
        gain = baseEntropy - newEntropy
        if gain > bestGain:
            bestGain, bestFeat = gain, i
    return bestFeat



# 5. Split the dataset on one feature value
def split_dataset(dataset, axis, value):
    """Return the examples whose feature `axis` equals `value`, with that column removed.

    Dropping the matched column (the original kept it) keeps each sub-dataset
    aligned with the `labels` list, from which create_tree() deletes the chosen
    feature name before recursing — otherwise feature indices drift one level
    down the recursion.

    Args:
        dataset: list of examples (lists); class label in the last position.
        axis: column index of the feature to split on.
        value: feature value selecting the branch.

    Returns:
        list: matching examples without column `axis` (empty list if no match).
    """
    return [example[:axis] + example[axis + 1:]
            for example in dataset if example[axis] == value]



# Driver: build a decision tree from the project's utility dataset and draw it.
# NOTE(review): runs at import time; consider an `if __name__ == "__main__":` guard.
decisionTreeUtils = DecisionTreeUtils()
# Dataset rows plus feature names; exact schema defined in DecisionTreeUtils.
(data, labels) = decisionTreeUtils.create_data()
# Out-parameter: presumably collects chosen feature names in split order —
# confirm against create_decision_tree.
target_label = []
myTree = decisionTreeUtils.create_decision_tree(data, labels,target_label)
createPlot(myTree)  # render the nested-dict tree with matplotlib



