package com.venn.pvuv;

import com.venn.demo.WordCount;
import com.venn.demo.WordCountMap;
import com.venn.demo.WordCountReduce;
import com.venn.util.HdfsUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob;
import org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;
import java.text.ParseException;

/**
 * Created by venn on 5/22/2018.
 */
/**
 * Driver that submits a single "generate_user_view_demo" MapReduce job
 * (WordCountMap / WordCountReduce over Text key/value pairs) through a
 * JobControl and waits for it to finish.
 *
 * Created by venn on 5/22/2018.
 */
public class UserViewDemo {

    /** Delay between polls of the JobControl state, in milliseconds. */
    private static final long POLL_INTERVAL_MS = 500L;

    /**
     * Entry point: configures and runs the job, then exits non-zero if the
     * output path cannot be cleared or the job fails.
     *
     * @param args optional overrides: args[0] = input path, args[1] = output path
     * @throws IOException    if job setup or HDFS access fails
     * @throws ParseException declared for compatibility with existing callers
     */
    public static void main(String[] args) throws IOException, ParseException {

        // Allow paths to be overridden on the command line; fall back to the
        // original hard-coded defaults for backward compatibility.
        String inputPath = args.length > 0 ? args[0] : "/venn/";
        //hdfs://10.80.248.11:8020/hive/warehouse/bi_viewlog.db/bkr_channel_one_minute_channel/log_date=20180408/
        String outputPath = args.length > 1 ? args[1]
                : "hdfs://venn05:8082/utsc/input_hive_log/log_date=";

        // Remove any pre-existing output directory; MapReduce refuses to
        // start if the output path already exists.
        if (!HdfsUtil.handleOutPutPath(outputPath)) {
            System.out.println("delete output path error, program exit.");
            System.exit(-1);
        }

        Configuration conf01 = new Configuration();
        // Key/value separator for the text OUTPUT format. The old
        // "mapred.textoutputformat.separator" key is deprecated; this is its
        // current name (the old comment also mislabeled it as an input setting).
        conf01.set("mapreduce.output.textoutputformat.separator", ",");

        // Job name as shown in the cluster UI.
        Job job01 = Job.getInstance(conf01, "generate_user_view_demo");
        ControlledJob jobCtrl01 = new ControlledJob(conf01);
        jobCtrl01.setJob(job01);
        // Jar containing the job classes.
        job01.setJarByClass(WordCount.class);
        // mapper class
        job01.setMapperClass(WordCountMap.class);
        // reduce class
        job01.setReducerClass(WordCountReduce.class);

        job01.setMapOutputKeyClass(Text.class);
        job01.setMapOutputValueClass(Text.class);
        job01.setOutputKeyClass(Text.class);
        job01.setOutputValueClass(Text.class);
        job01.setNumReduceTasks(16);
        // Recurse into (date-partitioned) subdirectories of the input path.
        FileInputFormat.setInputDirRecursive(job01, true);
        // set input path
        FileInputFormat.addInputPath(job01, new Path(inputPath));
        // set output path
        FileOutputFormat.setOutputPath(job01, new Path(outputPath));

        JobControl jobControl = new JobControl("ctr");
        jobControl.addJob(jobCtrl01);

        // JobControl is a Runnable driving the job state machine; run it on
        // its own thread and poll for completion from here.
        Thread t = new Thread(jobControl);
        t.setDaemon(true); // don't keep the JVM alive if main exits abnormally
        t.start();

        // Poll with a sleep instead of the original busy-wait, which spun a
        // CPU core at 100% until the job completed.
        while (!jobControl.allFinished()) {
            try {
                Thread.sleep(POLL_INTERVAL_MS);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // preserve interrupt status
                break;
            }
        }
        System.out.println(jobControl.getSuccessfulJobList());
        jobControl.stop();

        // Surface failures instead of silently printing "Finished" on a
        // failed run, and reflect them in the process exit code.
        if (!jobControl.getFailedJobList().isEmpty()) {
            System.out.println("failed jobs: " + jobControl.getFailedJobList());
            System.exit(1);
        }

        System.out.println("Finished");
    }
}
