package com.leadbank.bigdata.mapreduce.sort;

import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.net.URI;

/**
 * Created by hp on 2018/5/10.
 */
public class MySortJob extends Configured implements Tool {

    /**
     * Parses each tab-separated input line into two longs and emits a
     * composite {@link MyNewKey} (both numbers) with the second number as
     * the value, so the shuffle sorts by (first, second).
     */
    public static class MySortMapper extends Mapper<LongWritable, Text, MyNewKey, LongWritable> {
        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            String[] fields = value.toString().split("\t");
            long first = Long.parseLong(fields[0]);
            long second = Long.parseLong(fields[1]);
            context.write(new MyNewKey(first, second), new LongWritable(second));
        }
    }

    /**
     * For each group (grouped by firstnum via {@link MyGroupingComparator})
     * emits the first number as text together with the smallest second number
     * seen in the group.
     */
    public static class MySortReduce extends Reducer<MyNewKey, LongWritable, Text, LongWritable> {
        @Override
        protected void reduce(MyNewKey key, Iterable<LongWritable> values, Context context) throws IOException, InterruptedException {
            long smallest = Long.MAX_VALUE;
            for (LongWritable value : values) {
                smallest = Math.min(smallest, value.get());
            }
            context.write(new Text(String.valueOf(key.firstnum)), new LongWritable(smallest));
        }
    }

    /**
     * Composite key holding both numbers of an input line. The shuffle sorts
     * by firstnum, then secondnum (both ascending).
     */
    private static class MyNewKey implements WritableComparable<MyNewKey> {

        long firstnum;
        long secondnum;

        /** No-arg constructor required by Hadoop for deserialization via reflection. */
        public MyNewKey() {
        }

        public MyNewKey(long firstnum, long secondnum) {
            this.firstnum = firstnum;
            this.secondnum = secondnum;
        }

        /**
         * Orders by firstnum, then secondnum. Uses {@link Long#compare} instead
         * of casting a long difference to int: the subtraction can overflow and
         * return the wrong sign (e.g. comparing Long.MAX_VALUE with -1).
         */
        @Override
        public int compareTo(MyNewKey anotherkey) {
            int byFirst = Long.compare(firstnum, anotherkey.firstnum);
            if (byFirst != 0) {
                return byFirst;
            }
            return Long.compare(secondnum, anotherkey.secondnum);
        }

        @Override
        public void write(DataOutput out) throws IOException {
            out.writeLong(firstnum);
            out.writeLong(secondnum);
        }

        @Override
        public void readFields(DataInput in) throws IOException {
            firstnum = in.readLong();
            secondnum = in.readLong();
        }

        /** Consistent with {@link #compareTo}: equal iff both fields match. */
        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (!(o instanceof MyNewKey)) {
                return false;
            }
            MyNewKey other = (MyNewKey) o;
            return firstnum == other.firstnum && secondnum == other.secondnum;
        }

        /**
         * Overridden together with equals; the default identity hashCode would
         * give non-deterministic partitioning under HashPartitioner if the job
         * ever ran with more than one reduce task.
         */
        @Override
        public int hashCode() {
            return 31 * Long.hashCode(firstnum) + Long.hashCode(secondnum);
        }
    }

    /**
     * Grouping comparator: groups reduce input by the first column (firstnum)
     * only, so all records sharing a firstnum reach one reduce() call.
     */
    private static class MyGroupingComparator implements
            RawComparator<MyNewKey> {

        /**
         * Compare deserialized keys by firstnum only. Uses {@link Long#compare}
         * instead of casting a long subtraction to int, which can overflow and
         * yield the wrong sign for large values.
         */
        @Override
        public int compare(MyNewKey key1, MyNewKey key2) {
            return Long.compare(key1.firstnum, key2.firstnum);
        }

        /**
         * Byte-level compare used during the shuffle.
         *
         * @param b1 first serialized key buffer
         * @param s1 start offset of the first key
         * @param l1 length of the first key
         * @param b2 second serialized key buffer
         * @param s2 start offset of the second key
         * @param l2 length of the second key
         *
         * Only the first 8 bytes are compared: MyNewKey.write() emits firstnum
         * as the leading long, so this matches grouping by firstnum alone.
         */
        @Override
        public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
            return WritableComparator.compareBytes(b1, s1, 8, b2, s2, 8);
        }

    }

    // HDFS input file: lines of two tab-separated long values
    public static final String INPUT_PATH = "hdfs://master:8020/tmp/mysort/input/mysort.txt";
    // HDFS output directory: deleted before each run by run()
    public static final String OUTPUT_PATH = "hdfs://master:8020/tmp/mysort/output/";

    /**
     * Configures and submits the sort job.
     *
     * @param args unused; input/output locations come from the constants above
     * @return 0 on job success, 1 on failure
     * @throws Exception on HDFS access or job submission errors
     */
    @Override
    public int run(String[] args) throws Exception {
        // Remove any previous output so the job does not fail on an existing directory.
        FileSystem fileSystem = FileSystem.get(new URI(INPUT_PATH), getConf());
        Path outPath = new Path(OUTPUT_PATH);
        if (fileSystem.exists(outPath)) {
            fileSystem.delete(outPath, true);
        }

        Job wcjob = Job.getInstance(getConf(), "MySortJob");
        // Locate the jar containing this job's classes.
        wcjob.setJarByClass(MySortJob.class);

        wcjob.setMapperClass(MySortJob.MySortMapper.class);
        wcjob.setReducerClass(MySortJob.MySortReduce.class);
        // Mapper output key/value types.
        wcjob.setMapOutputKeyClass(MyNewKey.class);
        wcjob.setMapOutputValueClass(LongWritable.class);
        // Reducer output key/value types.
        wcjob.setOutputKeyClass(Text.class);
        wcjob.setOutputValueClass(LongWritable.class);
        // Custom grouping: reduce groups are keyed by firstnum only.
        wcjob.setGroupingComparatorClass(MyGroupingComparator.class);

        // Single reducer so the output is one globally sorted file.
        wcjob.setNumReduceTasks(1);

        // Input location.
        org.apache.hadoop.mapreduce.lib.input.FileInputFormat.setInputPaths(wcjob, INPUT_PATH);
        // Output location.
        FileOutputFormat.setOutputPath(wcjob, new Path(OUTPUT_PATH));

        // Return an exit code instead of calling System.exit() here: run() is
        // invoked through ToolRunner, and exiting would bypass its error
        // handling (the original also made the trailing "return 0" unreachable).
        return wcjob.waitForCompletion(true) ? 0 : 1;
    }

    /**
     * Entry point: submits the job through ToolRunner so Hadoop generic
     * options (-D, -conf, -fs, ...) are parsed into the Configuration.
     *
     * <p>The previous body contained unrelated leftover debug code (a
     * "busincode" string check) while the actual job launch was commented
     * out; this restores the launcher.
     */
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        try {
            int res = ToolRunner.run(conf, new MySortJob(), args);
            System.exit(res);
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(1);
        }
    }
}
