package cn.com.mr01.lianxi.mr_lianxi_16;

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

/**
 * Mapper that emits the first tab-separated field of each input line as the key
 * and the whole line as the value.
 *
 * <p>Example (per the original author's notes): an input line
 * {@code "1001\t张三\t9001\t计算机科学与技术"} produces the pair
 * {@code <"1001", "1001\t张三\t9001\t计算机科学与技术">}, so records from
 * different files sharing the same leading id are grouped for a reduce-side join.
 */
public class TokenizerMapper extends Mapper<LongWritable, Text, Text, Text> {

	// Reused output objects — standard Hadoop practice to avoid per-record allocation.
	private final Text k = new Text();
	private final Text v = new Text();

	/**
	 * Emits {@code <firstField, wholeLine>} for each non-blank input line.
	 *
	 * @param key     byte offset of the line within the input split (unused)
	 * @param value   one line of input, fields separated by tabs
	 * @param context Hadoop context used to emit the output pair
	 * @throws IOException          if the underlying write fails
	 * @throws InterruptedException if the task is interrupted
	 */
	@Override
	protected void map(LongWritable key, Text value, Context context)
			throws IOException, InterruptedException {

		String line = value.toString();
		// Skip blank lines — the original code would have emitted an empty key.
		if (line.isEmpty()) {
			return;
		}

		// First tab-separated field is the join key. indexOf avoids the regex
		// compilation and String[] allocation that split("\t") incurs per record.
		int tab = line.indexOf('\t');
		k.set(tab >= 0 ? line.substring(0, tab) : line);
		// Value is the entire original line.
		v.set(value);
		context.write(k, v);
	}
}
