package com.mango.ch04;

import com.mango.HDFSTools.HDFSAPI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.jobcontrol.JobControl;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.MultipleInputs;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import java.io.IOException;
import java.net.URISyntaxException;

public class MR_Left_Out_Join_Job extends Configured implements Tool {
    // Hard-coded local (Windows dev) paths; CLI args are currently ignored.
    static Path transactions = new Path("d:/HadoopData/MR_Left_Out_Join_Job/input/transactions.txt");// input
    static Path users = new Path("d:/HadoopData/MR_Left_Out_Join_Job/input/users.txt"); // input
    static Path outPath = new Path("d:/HadoopData/MR_Left_Out_Join_Job/output");        // phase-1 output / phase-2 input
    static Path outPath2 = new Path("d:/HadoopData/MR_Left_Out_Join_Job/step2_output"); // phase-2 output
    static Path inputPath = new Path("d:/HadoopData/MR_Left_Out_Join_Job/input");

    /**
     * Configures and runs a two-phase MapReduce chain via {@link JobControl}:
     * phase 1 performs a reduce-side left outer join of users with
     * transactions (secondary sort through custom partitioner / grouping /
     * sort comparators); phase 2 consumes the phase-1 output and aggregates
     * per-location counts.
     *
     * @param args command-line arguments (unused; paths come from the static fields)
     * @return 0 when all controlled jobs succeed, 1 when any job fails
     * @throws Exception if job setup or monitoring fails
     */
    @Override
    public int run(String[] args) throws Exception {
//		checkPath();
        Configuration conf = getConf();

        // ---- Phase 1: left outer join with secondary sort ----
        Job job = Job.getInstance(conf);
        job.setJarByClass(MR_Left_Out_Join_Job.class);
        job.setJobName("Phase-1: MR_Left_Out_Join_Job");
        // "secondary sort" is handled by setting the following 3 plug-ins:
        // 1. how the mapper generated keys will be partitioned
        job.setPartitionerClass(SecondarySortPartitioner.class);
        // 2. how the natural keys (generated by mappers) will be grouped
        job.setGroupingComparatorClass(SecondarySortGroupComparator.class);
        // 3. how PairOfStrings will be sorted (controls ordering of the custom key pairs)
        job.setSortComparatorClass(MyPairComparator.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        job.setReducerClass(MR_Left_Out_Join_Rducer.class);
        // Multiple mappers run in parallel, one per input data set.
        MultipleInputs.addInputPath(job, users, TextInputFormat.class,
                MR_Left_Out_Join_Mapper.MR_Left_Out_Join_UserMapper.class);
        MultipleInputs.addInputPath(job, transactions, TextInputFormat.class,
                MR_Left_Out_Join_Mapper.MR_Left_Out_Join_TranscationMapper.class);
        job.setMapOutputKeyClass(MyPair.class);
        job.setMapOutputValueClass(MyPair.class);
        FileOutputFormat.setOutputPath(job, outPath);
        ControlledJob cj = new ControlledJob(conf);
        cj.setJob(job);

        // ---- Phase 2: count locations from the join output ----
        Job job2 = Job.getInstance(conf);
        job2.setJarByClass(MR_Left_Out_Join_Job.class);
        job2.setJobName("Phase-2: LocationCountDriver");

        FileInputFormat.addInputPath(job2, outPath);
        FileOutputFormat.setOutputPath(job2, outPath2);
        job2.setInputFormatClass(TextInputFormat.class);

        job2.setMapperClass(LocationCountMapper.class);
        job2.setReducerClass(LocationCountReducer.class);

        job2.setMapOutputKeyClass(Text.class);
        job2.setMapOutputValueClass(Text.class);

        job2.setOutputKeyClass(Text.class);
        job2.setOutputValueClass(LongWritable.class);
        ControlledJob cj2 = new ControlledJob(conf);
        cj2.setJob(job2);
        cj2.addDependingJob(cj); // phase 2 must not start before phase 1 finishes

        JobControl jc = new JobControl(this.getClass().getName());
        jc.addJob(cj);
        jc.addJob(cj2);
        Thread controller = new Thread(jc);
        controller.setDaemon(true); // don't keep the JVM alive if we exit abnormally
        controller.start();
        try {
            while (!jc.allFinished()) {
                for (ControlledJob tmpCj : jc.getRunningJobList()) {
                    tmpCj.getJob().monitorAndPrintJob();
                }
                // Avoid busy-waiting while jobs are queued but not yet running
                // (the original loop spun at full CPU in that state).
                Thread.sleep(500);
            }
        } finally {
            // Terminate the JobControl worker thread; the original leaked it.
            jc.stop();
        }
        // Report failure to the caller instead of unconditionally returning 0.
        return jc.getFailedJobList().isEmpty() ? 0 : 1;
    }

    /**
     * Prepares the HDFS directory layout: creates the input directory and
     * deletes any pre-existing output directories (MapReduce refuses to write
     * into an existing output path). Failures are logged and swallowed —
     * this is a best-effort setup step.
     */
    private void checkPath() {
        // Create the input folder
        try {
            HDFSAPI hdfs = new HDFSAPI();
            hdfs.createDirectory(inputPath);
            // Delete the output folders that may already exist
            hdfs.delDirectory(outPath);
            hdfs.delDirectory(outPath2);
            // hdfs.orpOver();
        } catch (IOException e1) {
            e1.printStackTrace();
            System.out.println("----------文件操作失败");
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can still observe it.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        } catch (URISyntaxException e) {
            e.printStackTrace();
        }
    }

    /**
     * Entry point: runs this tool through {@link ToolRunner} and propagates
     * its exit status to the JVM. Exits non-zero on failure (the original
     * fell through and exited 0 after a caught exception).
     */
    public static void main(String[] args) throws Exception {
        try {
            int status = ToolRunner.run(new Configuration(), new MR_Left_Out_Join_Job(), args);
            System.exit(status);
        } catch (Exception e) {
            System.out.println("app error");
            e.printStackTrace();
            System.exit(1);
        }
    }
}
