package com.test.conf;

import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.KeyValueTextInputFormat;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.SkipBadRecords;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class MyJobSkipBadRecords2 extends Configured implements Tool {

	/**
	 * Mapper that swaps each (key, value) pair: parses the incoming value as an
	 * int and emits (parsedValue, key). A non-numeric value throws
	 * NumberFormatException — deliberately, since this job demonstrates the
	 * skip-bad-records machinery configured in run().
	 */
	public static class MapClass extends MapReduceBase implements
			Mapper<Text, Text, IntWritable, Text> {

		// Per-task invocation counter used only for status/log output.
		// Instance (not static) so concurrent mapper instances in the same
		// JVM do not race on a shared counter.
		private int cnt = 1;

		// Demo counters surfaced in the job UI via Reporter.incrCounter.
		static enum ClaimsCounters {MISSING, QUOTED};

		// Reused across map() calls; safe because collect() serializes the
		// key immediately (standard old-mapred-API idiom).
		private final IntWritable outKey = new IntWritable();

		public void map(Text key, Text value,
				OutputCollector<IntWritable, Text> output, Reporter reporter)
				throws IOException {

			reporter.setStatus("map" + cnt);
			System.out.println("map" + cnt);
			System.err.println("map" + cnt++);

			reporter.incrCounter("A", "B", 1);
			reporter.incrCounter("A", "C", 1);
			reporter.incrCounter("A", "C", 1);
			reporter.incrCounter(ClaimsCounters.MISSING , 1);
			reporter.incrCounter(ClaimsCounters.QUOTED , 1);

			// parseInt avoids the Integer boxing of Integer.valueOf; a
			// malformed value still throws NumberFormatException (the "bad
			// record" this job is designed to skip).
			outKey.set(Integer.parseInt(value.toString()));
			output.collect(outKey, key);
		}
	}

	/**
	 * Reducer that joins all values for a key into a single comma-separated
	 * Text line and emits (key, "v1,v2,...").
	 */
	public static class Reduce extends MapReduceBase implements
			Reducer<IntWritable, Text, IntWritable, Text> {

		// Per-task invocation counter used only for status/log output.
		// Instance (not static) so concurrent reducer instances in the same
		// JVM do not race on a shared counter.
		private int cnt = 1;

		public void reduce(IntWritable key, Iterator<Text> values,
				OutputCollector<IntWritable, Text> output, Reporter reporter)
				throws IOException {

			// StringBuilder avoids the O(n^2) cost of repeated String +=
			// concatenation in a loop.
			StringBuilder csv = new StringBuilder();
			while (values.hasNext()) {
				if (csv.length() > 0) {
					csv.append(',');
				}
				csv.append(values.next().toString());
			}
			output.collect(key, new Text(csv.toString()));

			reporter.setStatus("r" + cnt);
			System.out.println("r" + cnt);
			System.err.println("r" + cnt++);
			reporter.incrCounter("A", "C", 1);
		}
	}

	/**
	 * Configures and submits the skip-bad-records demo job.
	 *
	 * @param args args[0] = input path, args[1] = output path
	 * @return 0 on success, 2 on incorrect usage
	 * @throws Exception if the job fails (JobClient.runJob throws on failure)
	 */
	@Override
	public int run(String[] args) throws Exception {

		// Validate arguments up front instead of failing later with a raw
		// ArrayIndexOutOfBoundsException.
		if (args.length < 2) {
			System.err.println("Usage: MyJobSkipBadRecords2 <input path> <output path>");
			return 2;
		}

		Configuration conf = getConf();
		// Let the framework skip arbitrarily many bad records per map task.
		SkipBadRecords.setMapperMaxSkipRecords(conf, Long.MAX_VALUE);

		JobConf job = new JobConf(conf, MyJobSkipBadRecords2.class);
		// null skip-output path: skipped records are discarded, not written
		// to a side file.
		SkipBadRecords.setSkipOutputPath(job, null);

		Path in = new Path(args[0]);
		Path out = new Path(args[1]);
		FileInputFormat.setInputPaths(job, in);
		FileOutputFormat.setOutputPath(job, out);

		job.setJobName("jjjob");
		job.setMapperClass(MapClass.class);
		job.setReducerClass(Reduce.class);

		job.setInputFormat(KeyValueTextInputFormat.class);
		job.setOutputFormat(TextOutputFormat.class);
		job.setOutputKeyClass(IntWritable.class);
		job.setOutputValueClass(Text.class);
		// Input lines are "key,value" rather than the default tab-separated
		// form expected by KeyValueTextInputFormat.
		job.set("key.value.separator.in.input.line", ",");

		JobClient.runJob(job);

		return 0;
	}

	/**
	 * Command-line entry point; delegates to {@link #run(String[])} through
	 * ToolRunner so that generic Hadoop options are parsed first.
	 */
	public static void main(String[] args) throws Exception {
		Tool tool = new MyJobSkipBadRecords2();
		int exitCode = ToolRunner.run(new Configuration(), tool, args);
		System.exit(exitCode);
	}

}
