package com.niit.hadoop.maxandmin;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;
//这是，mapper，采用TextInputFormat
/**
 * Mapper (TextInputFormat): parses each input line as a single long value,
 * tracks the running min/max/count for this map task, and emits exactly one
 * {@code MaxAndMinBean} per task in {@link #cleanup}. The reducer merges the
 * per-task beans into the global result.
 */
public class MaxAndMinMapper extends Mapper<LongWritable, Text, MaxAndMinBean, NullWritable> {
    // Running aggregates for this map task, folded into a MaxAndMinBean at cleanup().
    // Sentinels: min starts at MAX_VALUE and max at MIN_VALUE so the first
    // parsed value always replaces them.
    private long min = Long.MAX_VALUE;
    private long max = Long.MIN_VALUE;
    private int total = 0;

    /**
     * Parses one input line as a long and folds it into the running aggregates.
     * Lines that are not valid longs (blank, non-numeric) are skipped silently —
     * this is deliberate best-effort parsing, not an error condition.
     */
    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        final long currentValue;
        try {
            // trim() so lines with surrounding whitespace still parse.
            currentValue = Long.parseLong(value.toString().trim());
        } catch (NumberFormatException ignored) {
            return; // skip malformed lines
        }

        min = Math.min(min, currentValue);
        max = Math.max(max, currentValue);
        total++;
    }

    /**
     * Runs once after all map() calls; emits this task's (min, max, count).
     * If no valid records were seen, nothing is emitted — otherwise the
     * untouched sentinel values (Long.MAX_VALUE / Long.MIN_VALUE) would
     * poison the reducer's merge.
     */
    @Override
    protected void cleanup(Context context) throws IOException, InterruptedException {
        if (total > 0) {
            context.write(new MaxAndMinBean(min, max, total), NullWritable.get());
        }
    }
}
