package mapred.maxAndMin.way2;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

import java.io.IOException;
public class MaxAndMinJob {

    /**
     * Per-split mapper: tracks the local minimum, maximum and count of valid
     * long values, and emits a single {@link MaxAndMinValue} in {@code cleanup}.
     *
     * Lifecycle for a split with 9 records:
     *   1. setup
     *   2. map * 9
     *   3. cleanup
     */
    static class MapTask extends Mapper<LongWritable, Text, MaxAndMinValue, NullWritable> {

        // Running minimum of this split; start at MAX_VALUE so the first real
        // value always replaces it. (The original code had these two initial
        // values swapped, which broke the search.)
        long min = Long.MAX_VALUE;
        // Running maximum of this split; start at MIN_VALUE for the same reason.
        long max = Long.MIN_VALUE;
        // Count of valid integer records seen in this split.
        int total = 0;

        @Override
        protected void setup(Context context) {
            // Nothing to initialize; the fields above carry the per-split state.
        }

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            // Called once per input line.
            long currentValue;
            try {
                // Parse the line; non-numeric lines are filtered out via the
                // NumberFormatException and simply skipped.
                currentValue = Long.parseLong(value.toString());
            } catch (NumberFormatException e) {
                e.printStackTrace();
                return;
            }

            // Track the local minimum.
            if (currentValue < min) {
                min = currentValue;
            }
            // Track the local maximum.
            if (currentValue > max) {
                max = currentValue;
            }

            total++;
        }

        @Override
        protected void cleanup(Context context)
                throws IOException, InterruptedException {
            // Emit one record per split carrying the local min/max/count.
            // Skip empty splits so the sentinel initial values never reach the
            // reducer. context.write is what hands the result to the shuffle.
            if (total > 0) {
                MaxAndMinValue value = new MaxAndMinValue(min, max, total);
                System.out.println("map:" + value.toString());
                context.write(value, NullWritable.get());
            }
        }
    }

    /**
     * Folds the per-split results into the global minimum and maximum.
     * As in the mapper, the final output is written in {@code cleanup};
     * {@code reduce} only merges each incoming key into the running state.
     * reduce runs once per distinct key coming out of the shuffle.
     */
    static class ReduceTask extends Reducer<MaxAndMinValue, NullWritable, NullWritable, MaxAndMinValue> {
        // Same sentinel convention as the mapper: min starts high, max starts low.
        long min = Long.MAX_VALUE;
        long max = Long.MIN_VALUE;
        int total = 0;

        @Override
        protected void reduce(
                MaxAndMinValue key,
                Iterable<NullWritable> values,
                Context context)
                throws IOException, InterruptedException {

            // The global minimum is the smallest per-split minimum, and the
            // global maximum the largest per-split maximum. (The original code
            // compared min against max and vice versa — a double swap that only
            // happened to work when there was exactly one map task.)
            if (key.getMin().get() < min) {
                min = key.getMin().get();
            }
            if (key.getMax().get() > max) {
                max = key.getMax().get();
            }

            total += key.getTotal().get();
            System.out.println("total:" + total);
        }

        @Override
        protected void cleanup(Context context)
                throws IOException, InterruptedException {
            // Write the single global result.
            MaxAndMinValue value = new MaxAndMinValue(min, max, total);
            context.write(NullWritable.get(), value);
        }
    }

    /**
     * Configures and submits the job. Expects two arguments: input path and
     * output path. Exits 2 on bad usage, 0 on success, 1 on job failure.
     */
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
        if (otherArgs.length != 2) {
            System.err.println("Usage: MaxAndMinJob <in> <out>");
            System.exit(2);
        }
        Job job = Job.getInstance(conf);
        job.setJobName("MaxAndMinValue");
        job.setJarByClass(MaxAndMinJob.class);
        job.setMapperClass(MapTask.class);
        job.setReducerClass(ReduceTask.class);
        job.setMapOutputKeyClass(MaxAndMinValue.class);
        job.setMapOutputValueClass(NullWritable.class);
        job.setOutputKeyClass(NullWritable.class);
        job.setOutputValueClass(MaxAndMinValue.class);
        // A single reducer produces one global result file.
        job.setNumReduceTasks(1);
        FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
        FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
        // Let failures propagate (main declares throws Exception) instead of
        // swallowing them — the original caught and printed the exception and
        // then fell off the end of main, exiting 0 on failure.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
