# -*- coding: utf-8 -*-
"""
基尼系数
Created on Fri Apr 27 09:34:28 2018

@author: Allen
"""
import numpy as np
import matplotlib.pyplot as plt

from sklearn import datasets
# Load the iris dataset and keep only the last two features
# (columns 2 and 3: petal length and petal width).
iris = datasets.load_iris()
X, y = iris.data[:, 2:], iris.target

### Simulate searching for the best split using the Gini coefficient

def split( X, y, d, value ):
    """Partition (X, y) on feature `d` at threshold `value`.

    Rows with X[:, d] <= value go left, rows with X[:, d] > value go right.
    Returns (X_left, X_right, y_left, y_right).
    """
    left_mask = X[:, d] <= value
    right_mask = X[:, d] > value
    return X[left_mask], X[right_mask], y[left_mask], y[right_mask]

from collections import Counter
def gini( y ):
    """Return the Gini impurity of the label array `y`.

    Gini impurity = 1 - sum(p_k^2) over the class proportions p_k.
    0.0 means the node is pure (one class); larger values mean more mixing.

    An empty `y` returns 0.0: an empty node has no impurity (the original
    code fell through the loop and returned 1, which is wrong for that case).
    """
    n = len( y )
    if n == 0:
        return 0.0
    counter = Counter( y )
    res = 1.0
    for num in counter.values():
        p = num / n  # class proportion; n hoisted out of the loop
        res -= p**2
    return res

def try_split( X, y ):
    """Greedy search for the single best (dimension, threshold) split of (X, y).

    For every feature `d`, candidate thresholds are the midpoints between
    consecutive distinct sorted values; each candidate is scored by the sum
    of the Gini impurities of the two resulting children (unweighted, as in
    the original course code), and the lowest score wins.

    Returns (best_gini, best_d, best_v); (-1, -1) for the last two when no
    valid split exists (e.g. fewer than 2 samples or all-constant features).

    Bug fixed: the original only recomputed `v` inside the duplicate-value
    check but called split() unconditionally, so equal consecutive values
    re-evaluated a stale `v` — and on the first comparison `v` could be
    unbound (NameError) or leak from the previous dimension. The candidate
    evaluation now happens only when a fresh midpoint exists.
    """
    best_g = float( 'inf' )
    best_d, best_v = -1, -1
    for d in range( X.shape[1] ):
        sorted_index = np.argsort( X[:, d] )
        for i in range( 1, len( X ) ):
            # No midpoint between equal neighbours — not a valid threshold.
            if X[sorted_index[i-1], d] == X[sorted_index[i], d]:
                continue
            v = (X[sorted_index[i-1], d] + X[sorted_index[i], d]) / 2
            X_l, X_r, y_l, y_r = split( X, y, d, v )
            g = gini( y_l ) + gini( y_r )
            if g < best_g:
                best_g, best_d, best_v = g, d, v
    return best_g, best_d, best_v

# Root split over the full dataset.
# NOTE(review): the recorded value 0.6931... below equals ln(2), which is an
# entropy value, not a Gini impurity (Gini here would be 0.5) — the literal
# looks stale from an earlier entropy-based run; verify by re-running.
best_entropy, best_d, best_v = try_split( X, y )
'''
基尼系数：0.69314718055994529
维度：0
值：2.4500000000000002
'''
# Apply the root split, then search the left child for a further split.
X1_l, X1_r, y1_l, y1_r = split( X, y, best_d, best_v )
best_entropy1, best_d1, best_v1 = try_split( X1_l, y1_l )
'''
基尼系数：0.0
维度：0
值：1.05
基尼系数是0，说明左子树无需再分
'''
# Recorded impurity of 0.0 means the left child is pure — no further split
# is needed there. Now search the right child.
best_entropy2, best_d2, best_v2 = try_split( X1_r, y1_r )
'''
基尼系数：0.2105714900645938
维度：1
值：1.75
右子树基尼系数不为零，需要继续分类
'''
# The right child's recorded impurity is non-zero, so it would need to be
# split again to continue growing the tree.