package cn.com.mrAdv03;

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

//key --- key % 100 == 0
//100	jack	M	10000	----->	<100, "100	jack	M	10000">
//200	marry	F	12000	----->	<200, "200	marry	F	12000">
//300	john	M	11000	----->	<300, "300	john	M	11000">

/**
 * Mapper that keys each input record by the integer found in the first
 * tab-separated column, passing the full record through as the value:
 * {@code "100\tjack\tM\t10000" -> <100, "100\tjack\tM\t10000">}.
 *
 * <p>Malformed records (empty lines, non-numeric first column) are counted
 * and skipped rather than throwing, so a single bad line cannot fail the
 * whole job.
 */
public class TokenizerMapper extends Mapper<LongWritable, Text, MyselfIntWritable, Text>{
	
	// Reused across map() calls to avoid per-record allocation (standard Hadoop idiom).
	private final MyselfIntWritable myNum = new MyselfIntWritable();
	
	/**
	 * Extracts the leading integer column of {@code value} and emits
	 * {@code <column, value>}; skips (and counts) records whose first
	 * column is not a valid int.
	 */
	@Override
	protected void map(LongWritable key, Text value, Context context)
			throws IOException, InterruptedException {
		// Limit 2: we only need the first column, no point splitting the rest.
		String snum = value.toString().split("\t", 2)[0];
		try {
			myNum.set(Integer.parseInt(snum));
		} catch (NumberFormatException ignored) {
			// Bad or blank first column: count it and move on instead of
			// letting a single malformed line kill the task.
			context.getCounter("TokenizerMapper", "MALFORMED_RECORDS").increment(1);
			return;
		}
		context.write(myNum, value);
	}

}
