package cn.com.mrAdv04;

import java.io.IOException;

import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

/**
 * Maps each tab-separated input line "studentId \t score" to a
 * <studentId, score> key/value pair, e.g.:
 * 
 * S01 75.5 -> <"S01", 75.5>
 * 
 * S01 45.5 -> <"S01", 45.5>
 * 
 * S01 71.5 -> <"S01", 71.5>
 * 
 * @author yingfing
 * @date 2020-11-21 @time 11:01:41 AM
 */

public class TokenizerMapper extends
		Mapper<LongWritable, Text, Text, DoubleWritable> {

	// Reusable output key (student id) and value (score), allocated once per mapper
	private final Text k = new Text();
	private final DoubleWritable v = new DoubleWritable();

	/**
	 * Splits each input line on the tab character and emits
	 * <studentId, score>, e.g. "S01\t75.5" becomes <"S01", 75.5>.
	 */
	@Override
	protected void map(LongWritable key, Text value, Context context)
			throws IOException, InterruptedException {

		// Split the tab-separated line once: fields[0] = student id, fields[1] = score
		String[] fields = value.toString().split("\t");
		k.set(fields[0]);
		v.set(Double.parseDouble(fields[1]));
		context.write(k, v);
	}
}
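
/**
 * Illustrative sketch only, not part of the original source: a hypothetical
 * reducer showing how the <studentId, score> pairs emitted by TokenizerMapper
 * could be averaged per student, e.g. <"S01", [75.5, 45.5, 71.5]> -> <"S01", 64.17>.
 * A driver would register it with job.setReducerClass(AverageScoreReducer.class);
 * the actual job in this project may aggregate the scores differently.
 */
class AverageScoreReducer extends
		org.apache.hadoop.mapreduce.Reducer<Text, DoubleWritable, Text, DoubleWritable> {

	private final DoubleWritable result = new DoubleWritable();

	@Override
	protected void reduce(Text key, Iterable<DoubleWritable> values, Context context)
			throws IOException, InterruptedException {

		// Sum all scores seen for this student id
		double sum = 0.0;
		int count = 0;
		for (DoubleWritable value : values) {
			sum += value.get();
			count++;
		}

		// Emit the average score for this student id
		result.set(sum / count);
		context.write(key, result);
	}
}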
