package com.niit.sequenceFile.write;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

/**
 * Date: 2024/10/11
 * Author: Ys
 * Description: Reads a plain text file line by line.
 *   KeyIn:   LongWritable — the byte offset of the line within the file (not a line number).
 *   ValueIn: Text — the contents of the line.
 */
/**
 * Mapper that forwards every input line unchanged, keyed by {@link NullWritable},
 * so a downstream writer can emit the values into a SequenceFile.
 *
 * <p>Input:  (LongWritable byte offset, Text line) — the standard TextInputFormat pair.
 * <p>Output: (NullWritable, Text line).
 */
public class WriteSeqMapper extends Mapper<LongWritable, Text, NullWritable, Text> {

    // NullWritable.get() returns an immutable singleton, so the key can be a
    // final field — no need to (re)initialize it in setup() or null it in cleanup().
    private final NullWritable outKey = NullWritable.get();

    /**
     * Called once per input line (many times per task).
     *
     * @param key     byte offset of the line in the input split (unused)
     * @param value   the text of the current line
     * @param context framework context used to emit the output pair
     */
    @Override
    protected void map(LongWritable key, Text value,
                       Mapper<LongWritable, Text, NullWritable, Text>.Context context)
            throws IOException, InterruptedException {
        // Discard the offset key; pass the line through under a NullWritable key.
        context.write(outKey, value);
    }
}
