package com.mango.ch02;

import com.mango.HDFSTools.HDFSAPI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;
import java.net.URISyntaxException;

/**
 * 自定义组合键的二次排序
 *
 * @author Mango
 */
/**
 * Driver for a secondary sort implemented with a custom composite key.
 *
 * <p>The map output key is a {@link CompositeKey}. A custom sort comparator
 * orders composite keys, a custom partitioner routes records by natural key
 * only, and a custom grouping comparator groups reducer input by natural key
 * only — together producing the secondary-sort effect.
 *
 * @author Mango
 */
public class SparkSortJob {
    // HDFS paths, kept for reference; the job currently runs against local paths.
    // static Path INPUT_PATH = new Path("/SparkSortJob/input");
    // static Path OUTPUT_PATH = new Path("/SparkSortJob/output");
    static Path INPUT_PATH = new Path("D:/HadoopData/input");
    static Path OUTPUT_PATH = new Path("D:/HadoopData/output");

    /**
     * Configures and submits the secondary-sort job, then exits the JVM with
     * 0 on success or 1 on failure.
     *
     * @param args command-line arguments (unused)
     * @throws IOException            if job submission or I/O setup fails
     * @throws ClassNotFoundException if a configured job class cannot be resolved
     * @throws InterruptedException   if the wait for job completion is interrupted
     */
    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        // checkPath(INPUT_PATH, OUTPUT_PATH); // enable when running against HDFS
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);
        // This class is packaged in the same jar as the job logic, so Hadoop
        // can locate the jar dynamically from it.
        job.setJarByClass(SparkSortJob.class);
        job.setJobName("SparkSortJob");

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        job.setNumReduceTasks(2);

        job.setMapperClass(SparkSortMapper.class);
        job.setReducerClass(SparkSortReducer.class);

        job.setMapOutputKeyClass(CompositeKey.class);
        job.setMapOutputValueClass(NaturalValue.class);
        // Sort comparator: orders the composite keys.
        job.setSortComparatorClass(Compositekeycomparator.class);

        // Partitioner: partitions by natural key so all records sharing a
        // natural key reach the same reducer.
        job.setPartitionerClass(NaturalKeyPartitioner.class);

        // Grouping comparator: groups reducer input by natural key only.
        job.setGroupingComparatorClass(NaturalKeyGroupingComparator.class);

        FileInputFormat.setInputPaths(job, INPUT_PATH);
        FileOutputFormat.setOutputPath(job, OUTPUT_PATH);
        boolean status = job.waitForCompletion(true);
        System.exit(status ? 0 : 1);
    }

    /**
     * Best-effort HDFS preparation: creates the input directory and deletes
     * any pre-existing output directory (Hadoop refuses to overwrite output).
     * Failures are logged and swallowed so setup problems never abort the
     * caller; an interrupt is re-asserted so callers can still observe it.
     *
     * @param inputPath  input directory to create
     * @param outputPath output directory to delete if it already exists
     */
    private static void checkPath(Path inputPath, Path outputPath) {
        try {
            HDFSAPI hdfs = new HDFSAPI();
            hdfs.createDirectory(inputPath);
            // Remove any existing output directory before the job runs.
            hdfs.delDirectory(outputPath);
        } catch (IOException e1) {
            e1.printStackTrace();
            System.out.println("----------文件操作失败");
        } catch (InterruptedException e) {
            // Restore the interrupt flag instead of silently swallowing it.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        } catch (URISyntaxException e) {
            e.printStackTrace();
        }
    }
}
