package com.sunsys.mapreduce.poc1.filerecords.validation;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

/**
 * Driver for the record-count validation MapReduce job: wires up
 * {@link FileRecordsValidationMapper} over the configured input path and
 * writes its (Text, Text) output to the configured output path.
 */
public class FileRecordsValidationMapperDriver extends Configured implements Tool {

	/**
	 * Configures and runs the validation job.
	 *
	 * @param args optional: {@code args[0]} = input path, {@code args[1]} = output
	 *             path; falls back to the original hard-coded development paths
	 *             when not supplied (backward compatible with the old behavior).
	 * @return 0 when the job completes successfully, 1 when it fails
	 * @throws Exception if job setup or execution fails
	 */
	@Override
	public int run(String[] args) throws Exception {
		// Prefer command-line paths; keep the old hard-coded defaults so the
		// no-argument invocation still works exactly as before.
		String inputFile = args.length > 0 ? args[0]
				: "/home/ubuntu/workspace-info/CountersExample/input/";
		String outputPath = args.length > 1 ? args[1]
				: "/home/ubuntu/workspace-info/CountersExample/output/1";

		// Use the Configuration injected by ToolRunner/Configured (so -D and
		// -conf options are honored); fall back to a fresh one when the driver
		// is constructed directly.
		Configuration conf = getConf() != null ? getConf() : new Configuration();
		// Cap split size at 4 KB so several map tasks run even on tiny inputs.
		// NOTE(review): "mapred.max.split.size" is the deprecated key name;
		// Hadoop 2+ translates it to
		// mapreduce.input.fileinputformat.split.maxsize — confirm target version.
		conf.set("mapred.max.split.size", "4096");

		// Job.getInstance replaces the deprecated Job(Configuration, String) ctor.
		Job job = Job.getInstance(conf, "RecordsCountValidationJob");
		job.setJarByClass(FileRecordsValidationMapperDriver.class);
		job.setMapperClass(FileRecordsValidationMapper.class);
		// TODO may have to set the reducer (with none set, the identity reducer runs)

		FileInputFormat.addInputPath(job, new Path(inputFile));
		FileOutputFormat.setOutputPath(job, new Path(outputPath));

		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(Text.class);

		// Propagate job success/failure instead of always returning 0.
		return job.waitForCompletion(true) ? 0 : 1;
	}

	/**
	 * Entry point. Runs the driver through {@link ToolRunner} so generic Hadoop
	 * options (-D, -conf, -files, ...) are parsed and injected via setConf(),
	 * and exits with the job's real status code.
	 */
	public static void main(String[] args) throws Exception {
		int exitCode = ToolRunner.run(new Configuration(),
				new FileRecordsValidationMapperDriver(), args);
		System.out.println("Thank you");
		System.exit(exitCode);
	}
}
