package com.niit.maxandmin;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

/*
 * Mapper input types:
 *   LongWritable — byte offset of the current line within the input file.
 *   Text         — the line's content. If every line in the input file is a plain
 *                  number, it could alternatively be read as LongWritable/IntWritable.
 */
/**
 * Mapper that scans every input line as a long and tracks the minimum, maximum,
 * and count of parsed values across this map task. A single {@link MaxAndMinValue}
 * record (min, max, total) is emitted once, in {@link #cleanup}, after the last
 * call to {@link #map}.
 */
public class MAXandMinMapper extends Mapper<LongWritable, Text, MaxAndMinValue, NullWritable> {

    // Smallest value seen so far in this task.
    private long min;
    // Largest value seen so far in this task.
    private long max;
    // Number of lines successfully parsed as a long.
    private int total;

    /**
     * Runs exactly once per map task, before the first map() call.
     * Seeds {@code min} with Long.MAX_VALUE and {@code max} with Long.MIN_VALUE so
     * that ANY parsed value replaces the sentinel. (The original code seeded them
     * the other way around, which made {@code min} track the maximum and vice versa.)
     */
    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
        min = Long.MAX_VALUE; // any real value is smaller, so the first value wins
        max = Long.MIN_VALUE; // any real value is larger, so the first value wins
        total = 0;
    }

    /**
     * Parses one input line as a long and folds it into the running min/max/count.
     * Malformed lines are skipped and tallied in a job counter instead of being
     * dumped to stderr via printStackTrace().
     */
    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        final long currentValue;
        try {
            currentValue = Long.parseLong(value.toString().trim());
        } catch (NumberFormatException ignored) {
            // Deliberate best-effort: skip non-numeric lines, but make the skip
            // visible in the job's counters rather than silently swallowing it.
            context.getCounter("MAXandMinMapper", "MALFORMED_LINES").increment(1L);
            return;
        }
        if (currentValue < min) {
            min = currentValue;
        }
        if (currentValue > max) {
            max = currentValue;
        }
        total++;
    }

    /**
     * Runs exactly once per map task, after the last map() call.
     * Emits the (min, max, total) triple — unless no line parsed successfully,
     * in which case the sentinel extremes would be meaningless and nothing is written.
     */
    @Override
    protected void cleanup(Context context) throws IOException, InterruptedException {
        if (total == 0) {
            return; // no valid input: avoid emitting (Long.MAX_VALUE, Long.MIN_VALUE, 0)
        }
        context.write(new MaxAndMinValue(min, max, total), NullWritable.get());
    }
}
