package com.hbase.reduce;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

/**
 * Mapper for the classic word-count job: for every input line it splits the
 * text on whitespace and emits a (word, 1) pair for each token.
 */
public class WordCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

    // Reuse the writable objects across map() calls instead of allocating new ones per record.
    private final Text word = new Text();
    private final IntWritable one = new IntWritable(1);

    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        // Split the line on runs of whitespace and emit a count of 1 for each token.
        String[] tokens = value.toString().split("\\s+");
        for (String token : tokens) {
            if (token.isEmpty()) {
                continue; // skip the empty token produced by leading whitespace
            }
            word.set(token);
            context.write(word, one);
        }
    }
}
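
// Minimal driver sketch (an assumption, not part of the original file): it shows one
// way WordCountMapper could be wired into a Job. The class name WordCountDriverSketch,
// the job name, and the reducer mentioned in the comment below are hypothetical.
class WordCountDriverSketch {
    public static void main(String[] args) throws Exception {
        org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
        org.apache.hadoop.mapreduce.Job job = org.apache.hadoop.mapreduce.Job.getInstance(conf, "word count");
        job.setJarByClass(WordCountDriverSketch.class);
        job.setMapperClass(WordCountMapper.class);
        // A reducer that sums the counts would normally be set here,
        // e.g. job.setReducerClass(WordCountReducer.class) (hypothetical class).
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        // Input and output locations are taken from the command line.
        org.apache.hadoop.mapreduce.lib.input.FileInputFormat.addInputPath(
                job, new org.apache.hadoop.fs.Path(args[0]));
        org.apache.hadoop.mapreduce.lib.output.FileOutputFormat.setOutputPath(
                job, new org.apache.hadoop.fs.Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}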