#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Nov 11 16:57:34 2018
treeTest
@author: tywin
"""
import trees as TR

# Smoke-test the helpers from the trees module on its built-in sample data.
# NOTE(review): `creatDataSet` looks like a typo for `createDataSet` — the
# name must match whatever trees.py actually defines; confirm there.
myDat, labels = TR.creatDataSet()

dataset_entropy = TR.calcShannonEnt(myDat)
print(dataset_entropy)

split_on_feature0 = TR.splitDataSet(myDat, 0, 1)
print(split_on_feature0)

best_feature_index = TR.chooseBestFeatureToSplit(myDat)
print(best_feature_index)

# Exercise: compute the information gain of each weather feature by hand
from collections import Counter
from math import log

# Toy weather dataset, one dict per candidate feature.
# Each key is a feature value; each list holds the class labels of the
# samples that have that value (presumably 1 = positive class / "play",
# 0 = negative — TODO confirm against the exercise statement).
outlook = {'sunny':[1,1,0,0,0],'overcast':[1,1,1,1],'rainy':[1,1,1,0,0]}
humidity = {'high':[1,1,1,0,0,0,0],'normal':[1,1,1,1,1,1,0]}
windy = {'windy_false':[1,1,1,1,1,1,0,0],'windy_true':[1,1,1,0,0,0]}
temperature = {'hot':[1,1,0,0],'mild':[1,1,1,1,0,0],'cool':[1,1,1,0]}

def calcShannonEnt(dataSet):
    """Compute the Shannon entropy (in bits) of a collection of class labels.

    Args:
        dataSet: sized iterable of hashable class labels.

    Returns:
        float: entropy in bits; 0.0 for an empty collection (the original
        raised ZeroDivisionError in that case).
    """
    numEntries = len(dataSet)
    if numEntries == 0:
        # Guard: probability and log are undefined on an empty dataset.
        return 0.0
    counter = Counter(dataSet)
    # H = -sum(p * log2(p)) over the label frequencies.
    return -sum((count / numEntries) * log(count / numEntries, 2)
                for count in counter.values())

def calcInfoGain(dataSet):
    """Compute the information gain of partitioning samples by one feature.

    Args:
        dataSet: mapping from a feature value to the list of class labels
            of the samples that have that feature value.

    Returns:
        float: H(labels) - sum_v P(v) * H(labels | v), in bits;
        0.0 for an empty mapping (the original raised ZeroDivisionError).
    """
    # Total number of samples across all feature values.
    numEntries = sum(len(subset) for subset in dataSet.values())
    if numEntries == 0:
        return 0.0
    # Entropy of the full label set, before splitting on this feature.
    allLabels = [label for subset in dataSet.values() for label in subset]
    baseEntropy = calcShannonEnt(allLabels)
    # Weighted average entropy of the partitions induced by the feature.
    newEntropy = sum((len(subset) / numEntries) * calcShannonEnt(subset)
                     for subset in dataSet.values())
    return baseEntropy - newEntropy



# Report the information gain of each candidate feature; the feature with
# the largest gain would be chosen as the first split of the decision tree.
print('outlook的信息增益为%f'%calcInfoGain(outlook))
print('humidity的信息增益为%f'%calcInfoGain(humidity))
print('windy的信息增益为%f'%calcInfoGain(windy))
print('temperature的信息增益为%f'%calcInfoGain(temperature))