package com.zhl.hadoop.mapreduce.combiner;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

/**
 * @program: demos
 * @description: Word-count mapper: emits a (word, 1) pair for every space-separated
 *               token on each input line, feeding a combiner/reducer downstream.
 * @author: 刘振华
 * @create: 2020-10-28 10:57
 **/
public class CombinerWordCountMapper extends Mapper<LongWritable, Text, Text, LongWritable> {

	/** Reusable output value: every emitted word carries a count of 1. */
	private static final LongWritable ONE = new LongWritable(1);

	/**
	 * Reusable output key, re-set per token. Hoisted out of map() because the
	 * framework calls map() once per input record; allocating a fresh Text and
	 * LongWritable per call creates needless GC pressure on large inputs.
	 */
	private final Text wordText = new Text();

	/**
	 * Tokenizes one input line on single spaces and emits a (word, 1) pair per token.
	 *
	 * @param key     byte offset of the line within the input split (unused)
	 * @param value   one line of input text
	 * @param context Hadoop context through which (word, 1) pairs are emitted
	 * @throws IOException          if the framework fails to write the output pair
	 * @throws InterruptedException if the task is interrupted while writing
	 */
	@Override
	protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
		// Split on a single space, matching the original tokenization exactly.
		// NOTE(review): consecutive spaces yield empty tokens, which are emitted
		// as-is; switch to split("\\s+") if that is not intended.
		String[] words = value.toString().split(" ");
		for (String word : words) {
			wordText.set(word);
			context.write(wordText, ONE);
		}
	}
}
