package org.renwei.ml.trees;

import com.google.common.base.Charsets;
import com.google.common.collect.HashMultiset;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Multiset;
import com.google.common.io.Files;
import org.renwei.ml.DataHelper;

import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Created by renwei on 16/8/25.
 */
public class ID3 {
    private String decisionColumn;  // name of the label (target) column

    /**
     * Computes the Shannon entropy (base 2) of a list of class labels.
     *
     * @param dataSet label values; must be non-empty
     * @return entropy in bits; 0 when all labels are identical
     */
    private double getEntropy(List<String> dataSet) {
        Multiset<String> labelsSet = HashMultiset.create(dataSet);

        double shannonEnt = 0.0;
        for (Multiset.Entry<String> entry : labelsSet.entrySet()) {
            double prob = (double) entry.getCount() / dataSet.size();
            // log base 2 via the change-of-base formula
            shannonEnt -= prob * Math.log(prob) / Math.log(2);
        }
        return shannonEnt;
    }

    /**
     * Computes the conditional entropy H(decision | indexCol): the expected
     * entropy of the decision column after partitioning the rows by the
     * distinct values of {@code indexCol}.
     *
     * @param dataSet  column name -> column values (columns are parallel lists)
     * @param indexCol the attribute column to condition on
     * @return conditional entropy in bits
     */
    private double getConditionEntropy(Map<String, List<String>> dataSet, String indexCol) {
        double shannonEnt = 0.0;
        int sum = dataSet.get(indexCol).size();
        Multiset<String> multiset = DataHelper.getTypeCounts(dataSet.get(indexCol));
        for (Multiset.Entry<String> entry : multiset.entrySet()) {
            double prob = (double) entry.getCount() / sum;
            // Labels of the rows where indexCol == entry.getElement().
            List<String> temp = DataHelper
                .getSubSet(DataHelper.getIndex(entry.getElement(), dataSet.get(indexCol)),
                    dataSet.get(this.decisionColumn));
            shannonEnt += prob * getEntropy(temp);
        }

        return shannonEnt;
    }

    /**
     * Returns the most frequent label in {@code labels}. Used as a fallback
     * leaf value when no remaining attribute yields positive information gain.
     *
     * @param labels non-empty list of label values
     * @return the label with the highest count (ties broken arbitrarily)
     */
    private String majorityLabel(List<String> labels) {
        Multiset<String> counts = HashMultiset.create(labels);
        String best = null;
        int bestCount = -1;
        for (Multiset.Entry<String> entry : counts.entrySet()) {
            if (entry.getCount() > bestCount) {
                bestCount = entry.getCount();
                best = entry.getElement();
            }
        }
        return best;
    }

    /**
     * Prints the decision tree to stdout in pre-order: the node's attribute on
     * its own line, then each branch value followed by its subtree.
     *
     * @param root tree root; a null root prints nothing
     */
    public void printDT(DTNode root) {
        if (null == root) {
            return;
        }
        System.out.println(root.getAttribute());
        // Children may be null if a caller replaced them via setChildren(null).
        if (null == root.getChildren()) {
            return;
        }

        for (String attr : root.getChildren().keySet()) {
            System.out.print(attr + " ");
            printDT(root.getChildren().get(attr));
        }
    }

    /**
     * Classifies a single sample by walking the decision tree.
     *
     * @param root    decision tree (built by {@link #buildDT})
     * @param testMap sample to classify: attribute name -> attribute value
     * @param result  output list; the predicted label is appended when a leaf
     *                is reached (nothing is appended for an unseen value)
     */
    public void classify(DTNode root, Map<String, String> testMap, List<String> result) {
        // Leaf node: its attribute holds the predicted label.
        if (root.getChildren().isEmpty()) {
            result.add(root.getAttribute());
            return; // fixed: previously fell through to the (empty) child scan
        }

        // Follow the branch matching the sample's value for this attribute.
        String value = testMap.get(root.getAttribute());
        DTNode next = root.getChildren().get(value);
        if (next != null) {
            classify(next, testMap, result);
        }
    }

    /**
     * Builds an ID3 decision tree. At each node the attribute with the largest
     * information gain is chosen; the rows are split by its distinct values
     * and the attribute is removed from the child datasets.
     *
     * @param dataSet column name -> column values; must contain the decision
     *                column set via {@link #setDecisionColumn}
     * @return the root of the decision tree
     */
    public DTNode buildDT(Map<String, List<String>> dataSet) {
        DTNode node = new DTNode();
        double baseEntropy = getEntropy(dataSet.get(this.decisionColumn));

        // Base case: every row carries the same label -> leaf with that label.
        if (0 == baseEntropy) {
            node.setAttribute(dataSet.get(this.decisionColumn).get(0));
            return node;
        }

        // Find the attribute with the maximum information gain
        // (equivalently, the minimum conditional entropy).
        double maxGain = 0.0;
        String maxColumn = null;
        for (String key : dataSet.keySet()) {
            if (key.equals(this.decisionColumn)) {
                continue;
            }
            double gain = baseEntropy - getConditionEntropy(dataSet, key);
            if (gain > maxGain) {
                maxGain = gain;
                maxColumn = key;
            }
        }

        // Fixed: when no attribute yields positive gain (or none remain),
        // emit a majority-vote leaf instead of splitting on a null column,
        // which previously caused a NullPointerException.
        if (null == maxColumn) {
            node.setAttribute(majorityLabel(dataSet.get(this.decisionColumn)));
            return node;
        }

        node.setAttribute(maxColumn);
        List<String> ds = dataSet.get(maxColumn);   // values of the split attribute

        // Split the rows on each distinct value of the chosen attribute.
        Multiset<String> multiset = DataHelper.getTypeCounts(ds);
        for (Multiset.Entry<String> entry : multiset.entrySet()) {
            List<Integer> indexList = DataHelper.getIndex(entry.getElement(), ds);

            // Sub-dataset: matching rows only, minus the split attribute.
            Map<String, List<String>> subSet = Maps.newHashMap();
            for (String subKey : dataSet.keySet()) {
                if (!subKey.equals(maxColumn)) {
                    subSet.put(subKey, DataHelper.getSubSet(indexList, dataSet.get(subKey)));
                }
            }

            node.getChildren().put(entry.getElement(), buildDT(subSet));
        }

        return node;
    }


    public String getDecisionColumn() {
        return decisionColumn;
    }

    public void setDecisionColumn(String decisionColumn) {
        this.decisionColumn = decisionColumn;
    }
}


//节点类型
class DTNode {

    /** Attribute tested at this node, or the class label when it is a leaf. */
    private String attribute;

    /** Subtrees keyed by attribute value; empty for leaf nodes. */
    private HashMap<String, DTNode> children = new HashMap<>();

    public String getAttribute() {
        return attribute;
    }

    public void setAttribute(String attribute) {
        this.attribute = attribute;
    }

    public HashMap<String, DTNode> getChildren() {
        return children;
    }

    public void setChildren(HashMap<String, DTNode> children) {
        this.children = children;
    }

    @Override
    public String toString() {
        return "DTNode{" + "attribute='" + attribute + '\'' + ", children=" + children + '}';
    }
}
