package cn.com.mr01.lianxi.mr_lianxi_03;

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

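/**
 * Mapper that splits each comma-separated input line into tokens, parses each
 * token as an integer, and emits it as the value. The output key is never set,
 * so every record shares the same (empty) key and the reducer receives all
 * parsed numbers in a single group.
 */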
public class TokenizerMapper extends
		Mapper<LongWritable, Text, Text, IntWritable> {

	// Reusable output value; MapReduce convention is to reuse Writable
	// instances across records instead of allocating a new one per call
	private final IntWritable n = new IntWritable();
	// Output key; left unset, so all records are emitted under one empty key
	private final Text k = new Text();

	@Override
	protected void map(LongWritable key, Text value, Context context)
			throws IOException, InterruptedException {
		// 业务代码
		// 如何设置 key
		String[] words = value.toString().split(",");
		// 2.将每个单词封装成<k,v>
		for (String word : words) {
			try {
				// Parse the token as an integer
				int num = Integer.parseInt(word);
				// Wrap it as the output value and emit <k, v>
				n.set(num);
				context.write(k, n);
			} catch (NumberFormatException e) {
				// Skip tokens that are not valid integers. Catching only
				// NumberFormatException (rather than Exception) lets
				// IOException and InterruptedException from context.write
				// propagate instead of being silently swallowed.
			}
		}
	}
}
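
/*
 * A minimal driver sketch for wiring this mapper into a Job, assuming a
 * companion reducer (hypothetically named IntSumReducer here) and input/output
 * paths passed as args[0] and args[1]. Kept as a comment so this file still
 * compiles as a single public class:
 *
 *   Configuration conf = new Configuration();
 *   Job job = Job.getInstance(conf, "tokenizer");
 *   job.setJarByClass(TokenizerMapper.class);
 *   job.setMapperClass(TokenizerMapper.class);
 *   job.setReducerClass(IntSumReducer.class);  // hypothetical reducer
 *   job.setOutputKeyClass(Text.class);
 *   job.setOutputValueClass(IntWritable.class);
 *   FileInputFormat.addInputPath(job, new Path(args[0]));
 *   FileOutputFormat.setOutputPath(job, new Path(args[1]));
 *   System.exit(job.waitForCompletion(true) ? 0 : 1);
 */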
