package mr.scan;

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapreduce.Mapper;

import util.LongPair;

public class FirstMapper extends
		Mapper<LongWritable, Text, IntWritable, LongPair> {

	private static final Log LOG = LogFactory.getLog(FirstMapper.class);

	// Constant output key: every mapper's record goes to the same reducer group.
	private final static IntWritable one = new IntWritable(1);

	// Start offset of this task's input split within the file; -1 until setup()
	// succeeds (and stays -1 if the split string cannot be parsed).
	private long chunkey = -1;
	// Running product of every numeric token seen by this mapper; 1 is the
	// multiplicative identity, so an empty split yields (chunkey, 1).
	private long sum = 1;

	/**
	 * Emits exactly one record per map task: (1, (splitOffset, product)).
	 * Runs once after all map() calls for this split have completed.
	 */
	@Override
	protected void cleanup(Context context) throws IOException,
			InterruptedException {
		context.write(one, new LongPair(chunkey, sum));
	}

	/**
	 * Folds every whitespace-separated numeric token of the line into the
	 * running product. Blank lines are skipped. Nothing is written here;
	 * the single output record is emitted in cleanup().
	 *
	 * @throws NumberFormatException if a token is not a valid long
	 */
	@Override
	public void map(LongWritable key, Text line, Context context)
			throws IOException, InterruptedException {
		String aline = line.toString().trim();
		if (aline.isEmpty())
			return;

		StringTokenizer itr = new StringTokenizer(aline);
		while (itr.hasMoreTokens()) {
			// Parse as long, not int: the accumulator is a long, so factors
			// above Integer.MAX_VALUE should not abort the task.
			// (StringTokenizer tokens carry no surrounding whitespace, so no
			// extra trim() is needed.)
			sum *= Long.parseLong(itr.nextToken());
		}
	}

	@Override
	protected void setup(Context context) throws IOException,
			InterruptedException {
		getChunkey(context);
	}

	/**
	 * Derives this task's chunk key from the input split's string form,
	 * e.g. "file:/path/data.txt:0+67108864" -&gt; 0 (the split start offset).
	 * On a malformed split string the -1 sentinel is kept and a warning is
	 * logged instead of killing the task with an unchecked exception.
	 *
	 * @param reporter task context providing the InputSplit
	 * @return the parsed start offset, or -1 if it could not be determined
	 */
	public long getChunkey(Context reporter) {
		String spl = reporter.getInputSplit().toString();
		LOG.info("getChunkey ================ " + spl);
		if (spl != null && spl.length() > 0) {
			// FileSplit.toString() is "path:start+length"; take the text after
			// the last ':' and before the '+'.
			String[] offset = spl.substring(spl.lastIndexOf(":") + 1).split("\\+");
			try {
				chunkey = Long.parseLong(offset[0]);
			} catch (NumberFormatException e) {
				// Unexpected split format: keep -1 so cleanup() still emits a
				// traceable record rather than failing the whole task in setup().
				LOG.warn("Could not parse split offset from: " + spl, e);
			}
		}
		return chunkey;
	}
}