package com.huan.hadoop.mr;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;
import java.util.Random;

/**
 * 从文件中读取数据，并封装成 Student 类型
 *
 * @author huan.fu
 * @date 2023/7/16 - 10:52
 */
/**
 * Reads lines from the input file and wraps each one into a {@code Student}
 * value emitted with a {@code NullWritable} key.
 *
 * <p>Expected line format: {@code <studentId> <studentName>} separated by
 * whitespace. A random offset in {@code [0, 10000)} is added to the parsed id
 * (presumably to spread keys — TODO confirm intent with the job author).
 * Malformed lines (fewer than two fields, or a non-numeric id) are skipped
 * rather than failing the whole task.
 */
public class ReadFileMapper extends Mapper<LongWritable, Text, NullWritable, Student> {

    // Reused across map() calls to avoid per-record allocation (standard Hadoop idiom).
    private final NullWritable outKey = NullWritable.get();
    private final Student outValue = new Student();
    // Single Random instance: creating one per record is wasteful and yields
    // poorly distributed values when many are seeded in quick succession.
    private final Random random = new Random();

    @Override
    protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, NullWritable, Student>.Context context) throws IOException, InterruptedException {
        // Split the line on runs of whitespace.
        String[] cells = value.toString().split("\\s+");

        // Skip blank or malformed lines instead of crashing the task.
        if (cells.length < 2) {
            return;
        }

        long studentId;
        try {
            studentId = Long.parseLong(cells[0]) + random.nextInt(10000);
        } catch (NumberFormatException ignored) {
            // First field is not a valid long — treat as a bad record and skip.
            return;
        }
        String studentName = cells[1];

        outValue.setStudentId(studentId);
        outValue.setStudentName(studentName);

        context.write(outKey, outValue);
    }
}
