import operator
from collections import Counter
from math import log

import numpy as np
import tensorflow as tf


# Compute Shannon entropy
def getShanonEnt(dataSet):
    """Return the Shannon entropy (in bits) of *dataSet*.

    The last element of each row is treated as the class label.

    Args:
        dataSet: list of rows; each row's label is ``row[-1]``.

    Returns:
        float: entropy of the label distribution; 0.0 for an empty
        or single-class dataset.
    """
    total = len(dataSet)
    if total == 0:
        # Guard: the original divided by len(dataSet) and would crash here.
        return 0.0
    # Tally how often each class label appears.
    label_counts = {}
    for row in dataSet:
        label = row[-1]
        label_counts[label] = label_counts.get(label, 0) + 1
    entropy = 0.0
    for count in label_counts.values():
        prob = count / total
        entropy -= prob * log(prob, 2)
    return entropy


# Split the dataset on a feature value
def cutList(dataSet, index, value):
    """Select the rows whose feature at *index* equals *value*.

    Args:
        dataSet: list of rows.
        index: feature column to test (and remove).
        value: value the feature must equal.

    Returns:
        New list of rows with the matched column removed; input rows
        are left untouched.
    """
    return [
        row[:index] + row[index + 1:]
        for row in dataSet
        if row[index] == value
    ]


# Choose the feature with the highest information gain (ID3)
def chooseTheBestIndex(dataSet):
    """Return the index of the feature whose split maximizes information gain.

    Args:
        dataSet: list of rows; the last element of each row is the class
            label, the preceding elements are features.

    Returns:
        int: best feature index, or -1 when no split yields positive gain.
    """
    # Number of feature columns (label column excluded). The original used
    # np.shape(dataSet)[1] - 1, which is the same for a rectangular dataset.
    num_features = len(dataSet[0]) - 1
    base_entropy = getShanonEnt(dataSet)
    best_gain = 0.0
    best_feature = -1
    for i in range(num_features):
        # Distinct values of feature i define the candidate branches.
        values = {row[i] for row in dataSet}
        split_entropy = 0.0
        for value in values:
            subset = cutList(dataSet, i, value)
            # Weight each branch's entropy by its share of the samples.
            prob = len(subset) / len(dataSet)
            split_entropy += prob * getShanonEnt(subset)
        info_gain = base_entropy - split_entropy
        if info_gain > best_gain:
            best_gain = info_gain
            best_feature = i
    return best_feature


# Majority vote once all features are exhausted
def classifyLast(classlist):
    """Return the most common label in *classlist* (majority vote).

    Used at a leaf when every feature has been consumed but the samples
    still carry mixed labels. Ties resolve to the label that appears
    first in the list — identical to the original stable reverse sort.
    """
    # Counter.most_common(1) returns [(label, count)] for the top label.
    return Counter(classlist).most_common(1)[0][0]


# Assemble the decision tree recursively
def creatTree(dataSet, lables):
    """Recursively build an ID3 decision tree.

    Args:
        dataSet: list of rows; the last element of each row is the class
            label.
        lables: feature names aligned with the feature columns. Unlike the
            original, the caller's list is NOT mutated; a sliced copy is
            used internally.

    Returns:
        A nested dict ``{feature_name: {feature_value: subtree_or_label}}``,
        or a bare class label at a leaf.
    """
    classlist = [row[-1] for row in dataSet]
    # Leaf: every remaining sample carries the same class.
    if classlist.count(classlist[0]) == len(classlist):
        return classlist[0]
    # Leaf: only the label column remains -> fall back to majority vote.
    if len(dataSet[0]) == 1:
        return classifyLast(classlist)
    bestfeat = chooseTheBestIndex(dataSet)
    bestLable = lables[bestfeat]
    # Bug fix: the original `del lables[bestfeat]` destroyed the caller's
    # list in place; slice out the used feature name instead.
    remaining = lables[:bestfeat] + lables[bestfeat + 1:]
    myTree = {bestLable: {}}
    for value in {row[bestfeat] for row in dataSet}:
        myTree[bestLable][value] = creatTree(
            cutList(dataSet, bestfeat, value), remaining)
    return myTree


def createdataSet():
    """Return the toy demo dataset and its feature names.

    Returns:
        tuple: (samples, feature_names) where each sample row is
        [feature0, feature1, label] with discrete feature values.
    """
    samples = [
        [1, 1, 'yes'],
        [1, 1, 'yes'],
        [1, 0, 'no'],
        [0, 1, 'no'],
        [0, 1, 'no'],
    ]
    feature_names = ['no surfacing', 'flippers']
    return samples, feature_names


if __name__ == "__main__":
    # Demo entry point: build and print the tree for the toy dataset.
    # Guarded so importing this module no longer runs the demo as a
    # side effect.
    dataSet, labels = createdataSet()
    print(creatTree(dataSet, labels))
