package mr.scan;

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import util.LongPair;

public class FirstMapper extends
		Mapper<LongWritable, Text, IntWritable, LongPair> {

	// Single shared key: all mappers' partial results land in one reducer group.
	private final static IntWritable one = new IntWritable(1);

	// Byte offset of the last non-empty line processed in this split.
	private long lastLineKey = 0;
	// Running total of every integer token seen across all lines of this split.
	private long sum = 0;

	/**
	 * Emits exactly one record per mapper — (1, (lastLineKey, sum)) — after
	 * the whole split has been consumed, so the reducer receives one partial
	 * sum per map task rather than one record per line.
	 */
	@Override
	protected void cleanup(Context context) throws IOException,
			InterruptedException {
		context.write(one, new LongPair(lastLineKey, sum));
	}

	/**
	 * Adds every whitespace-separated integer token on the line into
	 * {@link #sum}. Nothing is written here; the accumulated result is
	 * emitted once in {@link #cleanup(Context)}.
	 *
	 * @param key     byte offset of this line within the input split
	 * @param line    raw text of the line; blank lines are skipped
	 * @param context task context (unused until cleanup)
	 * @throws NumberFormatException if a token is not a valid long
	 */
	@Override
	public void map(LongWritable key, Text line, Context context)
			throws IOException, InterruptedException {
		String aline = line.toString().trim();
		if (aline.isEmpty()) {
			return;
		}

		StringTokenizer itr = new StringTokenizer(aline);
		while (itr.hasMoreTokens()) {
			// Parse as long: sum is a long, and Integer.parseInt would reject
			// tokens between Integer.MAX_VALUE and Long.MAX_VALUE.
			// Tokens from StringTokenizer carry no surrounding whitespace,
			// so no extra trim is needed.
			sum += Long.parseLong(itr.nextToken());
		}

		// Remember which line contributed last, for the single cleanup emit.
		lastLineKey = key.get();
	}
}