package cn.edu.cqu.fredyvia;

import cn.edu.cqu.fredyvia.Count.CountMapper;
import cn.edu.cqu.fredyvia.Count.CountPartitioner;
import cn.edu.cqu.fredyvia.Count.CountReducer;
import cn.edu.cqu.fredyvia.Format.TFOutputFormat;
import cn.edu.cqu.fredyvia.Utils.Util;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class CountVec {
    /* If Naive Bayes uses a multinomial distribution, each document must be
     * represented as a one-hot text vector; this driver runs the word-count
     * job that produces those term-frequency vectors. */
    /* Four command-line inputs are expected:
     *   args[0] - training-set text
     *   args[1] - output base path
     *   args[2] - stop-word list
     *   args[3] - punctuation list */

    public static void main(String[] args)
            throws Exception {

        // Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException.
        if (args.length < 4) {
            System.err.println("Usage: CountVec <data> <outputBase> <stopWords> <punctuation>");
            System.exit(2);
        }

        Path baseFolder = new Path(args[1]);
        Util.setBaseFolder(baseFolder);
        /*========================================================================================*/

        // Vectorize every document in the training and test sets.
        Configuration conf = new Configuration();
        Path dataPath = new Path(args[0]);
        // new Path(arg) is sufficient; wrapping through toUri() was a redundant round trip.
        Path stopWordPath = new Path(args[2]);
        Path punctuationPath = new Path(args[3]);
        Job countWord = Job.getInstance(conf, "cn.edu.cqu.fredyvia.Count word");
        countWord.setJarByClass(CountVec.class);
        // Ship the stop-word and punctuation files to every task via the distributed cache.
        countWord.addCacheFile(stopWordPath.toUri());
        countWord.addCacheFile(punctuationPath.toUri());

        countWord.setMapperClass(CountMapper.class);
        countWord.setPartitionerClass(CountPartitioner.class);
        countWord.setReducerClass(CountReducer.class);

        countWord.setMapOutputKeyClass(Text.class);
        countWord.setMapOutputValueClass(IntWritable.class);

        countWord.setOutputKeyClass(NullWritable.class);
        countWord.setOutputValueClass(Text.class);
        countWord.setOutputFormatClass(TFOutputFormat.class);
        FileInputFormat.addInputPath(countWord, dataPath);
        // Path(parent, child) joins the segments correctly instead of string concatenation.
        FileOutputFormat.setOutputPath(countWord, new Path(baseFolder, "cn/edu/cqu/fredyvia/Count"));

        if (!countWord.waitForCompletion(true)) {
            System.err.println("count word error while in file path: " + dataPath);
            System.exit(-1);
        }
        // Report the bad/good record counters accumulated by the mapper.
        System.out.println(countWord.getCounters().findCounter(CountMapper.FileRecorder.BadRecoder).getValue());
        System.out.println(countWord.getCounters().findCounter(CountMapper.FileRecorder.GoodRecorder).getValue());

    }
}
