package com.sunsys.mapreduce.enumcounter.retrieveInReducer;

import java.io.IOException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.record.RecordComparator;

/**
 * Mapper that validates per-file record counts.
 *
 * <p>For each input file it emits one pair (fileName, footerDeclaredCount) taken from the
 * file's FOOTER line, and increments a job counter once for every data record (any line that
 * is neither a HEADER nor a FOOTER line). The reducer side can then compare the counter value
 * against the count the footer claims.
 *
 * <p>NOTE(review): the counter is always {@code RecordsCount.FILE1_COUNTER}, regardless of
 * which file the split belongs to — presumably this job is run over a single file, or the
 * counter/file association is resolved elsewhere; verify against the driver/reducer.
 */
public class RecordsCountValidationMapper extends Mapper<LongWritable, Text, Text, Text> {

	/** Prefix identifying a header line (excluded from the record count). */
	private static final String HEADER_STARTS_WITH = "HEADER:";
	/** Prefix identifying a footer line (carries the declared record count). */
	private static final String FOOTER_STARTS_WITH = "FOOTER:";
	/** Token inside the footer line that immediately precedes the declared count. */
	private static final String RECORDS_COUNT = "records:";

	/** Name of the file backing the current input split; resolved once in {@link #setup}. */
	private String fileName = null;

	/* Reusable output writables — context.write serializes immediately, so reusing
	 * these avoids allocating two objects per emitted footer record. */
	private final Text outKey = new Text();
	private final Text outValue = new Text();

	public RecordsCountValidationMapper() {
	}

	/**
	 * Caches the name of the file this split comes from.
	 *
	 * <p>If FileInputFormat is used to specify the input file format, then the InputSplit
	 * returned by {@code context.getInputSplit()} can be cast to a {@link FileSplit} to
	 * access the file information.
	 */
	@Override
	protected void setup(Mapper<LongWritable, Text, Text, Text>.Context context) throws IOException, InterruptedException {
		FileSplit fileSplit = (FileSplit) context.getInputSplit();
		Path filePath = fileSplit.getPath();
		fileName = filePath.getName();
	}

	/**
	 * Routes each line: footer lines emit (fileName, declaredCount); header lines are
	 * ignored; every other line counts as one data record.
	 *
	 * @param key   byte offset of the line within the file (unused)
	 * @param value the line of text
	 */
	@Override
	protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, Text>.Context context)
			throws IOException, InterruptedException {

		String line = value.toString();
		if (line.startsWith(FOOTER_STARTS_WITH)) {
			int index = line.indexOf(RECORDS_COUNT);
			if (index != -1) {
				// Everything after "records:" is the count the footer declares.
				String recordsCount = line.substring(index + RECORDS_COUNT.length());
				System.out.println("file name:" + fileName + "\tfooter records count in map:" + recordsCount);

				outKey.set(fileName);
				outValue.set(recordsCount);
				context.write(outKey, outValue);
			}
		} else if (!line.startsWith(HEADER_STARTS_WITH)) {
			/* Data record: increment the records-count counter associated with the current file. */
			Counter counter = context.getCounter(RecordsCount.FILE1_COUNTER);
			counter.increment(1);
		}
	}
}
