/**
 * Created by Sun on 2015/9/14.
 */


import java.io.*;
import java.net.URI;
import java.util.List;
import java.util.Random;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.mahout.classifier.df.Bagging;
import org.apache.mahout.classifier.df.builder.DecisionTreeBuilder;
import org.apache.mahout.classifier.df.builder.TreeBuilder;
import org.apache.mahout.classifier.df.data.Data;
import org.apache.mahout.classifier.df.data.DataConverter;
import org.apache.mahout.classifier.df.data.Dataset;
import org.apache.mahout.classifier.df.data.Instance;
import org.apache.mahout.classifier.df.node.Node;
import org.apache.mahout.common.RandomUtils;
import org.apache.hadoop.fs.FileSystem;
import com.google.common.collect.Lists;

public class TestBuildTree {

    /**
     * Demo: reads a small sample from an HDFS file (connectivity check), then builds a
     * single decision tree with Mahout's random-forest (df) classes via {@link Bagging}.
     *
     * <p>Paths to the HDFS cluster, the dataset descriptor and the training data are
     * hard-coded below; adjust them for your environment.
     *
     * @param args unused
     * @throws IOException if the HDFS read, the dataset load, or the local data read fails
     */
    public static void main(String[] args) throws IOException {

        //conf.set("mapred.jar", "Test-1.0-SNAPSHOT-jar-with-dependencies.jar");
        //conf.set("fs.default.name", "hdfs://192.168.111.1:9000");// namenode address, replace as needed
        //conf.set("mapred.job.tracker", "192.168.111.1:9001");// jobtracker address
        //String dst = "hdfs://101.227.247.192:9000/user/hive/warehouse/telecomdata_all";
        String dst = "hdfs://101.227.247.192:9000/user/hive/warehouse/telecomdata_all/2015.02.csv";
        Configuration conf = new Configuration();

        FileSystem fs = FileSystem.get(URI.create(dst), conf);

        // Read (up to) the first 1 KiB of the HDFS file. The bytes are not used further;
        // this only verifies the cluster is reachable. try-with-resources closes the
        // stream (the original leaked it).
        try (FSDataInputStream hdfsInStream = fs.open(new Path(dst))) {
            byte[] ioBuffer = new byte[1024];
            // read() may return fewer than 1024 bytes (or -1); the value is deliberately ignored.
            hdfsInStream.read(ioBuffer);
        }

        // Dataset descriptor (local path)
        Path dsPath = new Path("/home/fansy/workspace/MahTestDemo/car_small.info");

        // Training data (local path) and a fixed seed for reproducible tree building
        String dataPath = "/home/fansy/mahout/data/forest/car_test_small.txt";
        Random rng = RandomUtils.getRandom(555);

        // create dataset
        Dataset ds = Dataset.load(new Configuration(), dsPath);
        // create converter
        DataConverter converter = new DataConverter(ds);
        // load data
        Data data = loadData(ds, converter, dataPath);

        // create treeBuilder and build one bagged tree
        TreeBuilder treeBuilder = new DecisionTreeBuilder();
        Bagging bag = new Bagging(treeBuilder, data);
        Node tree = bag.build(rng);

        System.out.println("the tree is built: " + tree);

    }

    /**
     * Loads training data from a local text file, converting each line into a
     * Mahout {@link Instance} via the supplied converter.
     *
     * @param ds        dataset descriptor the resulting {@link Data} is bound to
     * @param converter converts one text line into an {@link Instance}
     * @param dataPath  local filesystem path of the data file (one instance per line)
     * @return the loaded {@link Data}
     * @throws IOException if the file is missing or cannot be read
     *                     (the original swallowed this and then hit an NPE)
     */
    public static Data loadData(Dataset ds, DataConverter converter, String dataPath) throws IOException {
        List<Instance> instances = Lists.newArrayList();

        // try-with-resources guarantees the reader is closed even if convert() throws.
        // NOTE(review): FileReader uses the platform default charset — assumed fine for
        // this ASCII CSV data; confirm if the data may contain non-ASCII text.
        try (BufferedReader bf = new BufferedReader(new FileReader(dataPath))) {
            String line;
            while ((line = bf.readLine()) != null) {
                instances.add(converter.convert(line));
            }
        }

        System.out.println("load  file to Data done ...");

        return new Data(ds, instances);
    }

}