package hbase.mapReduce;
import hbase.mapReduce.Mapper.Mapper1;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapreduce.Job;

/**
 * MapReduce driver: scans the "date" data of the HBase table "docWeight",
 * runs {@link Mapper1} over it, and writes the reduced counts to the
 * "countDocDate" table via the project's Reducer class.
 */
public class startMapReduce {

	/**
	 * Configures and submits the "DateNumber_Doc" job, then exits with the
	 * job's success status (0 on success, 1 on failure or error).
	 *
	 * @param args unused command-line arguments
	 */
	public static void main(String[] args) {
		// HBaseConfiguration.create() is the supported factory; the
		// no-arg HBaseConfiguration constructor is deprecated.
		Configuration conf = HBaseConfiguration.create();
		try {
			// Job.getInstance replaces the deprecated new Job(conf, name).
			Job job = Job.getInstance(conf, "DateNumber_Doc");
			// Use the driver class itself so Hadoop locates the containing jar
			// unambiguously (the original used Mapper.class, which also works
			// but is less conventional for a driver).
			job.setJarByClass(startMapReduce.class);

			Scan scan = new Scan();
			// Scan.addColumns(String) is deprecated and expects
			// "family:qualifier" pairs; a bare "date" names a column family,
			// so addFamily is the correct call.
			// NOTE(review): assumes "date" is a column family of docWeight —
			// confirm against the table schema.
			scan.addFamily(Bytes.toBytes("date"));
			// TODO(review): the original called new ValueFilter(), but
			// ValueFilter has no no-arg constructor — it requires a CompareOp
			// and a comparator, e.g.
			//   scan.setFilter(new ValueFilter(CompareOp.EQUAL, new BinaryComparator(...)));
			// Restore the filter once the intended comparison is known.

			TableMapReduceUtil.initTableMapperJob("docWeight", scan, Mapper1.class,
					ImmutableBytesWritable.class, IntWritable.class, job);
			TableMapReduceUtil.initTableReducerJob("countDocDate", Reducer.class, job);
			System.exit(job.waitForCompletion(true) ? 0 : 1);
		} catch (IOException | ClassNotFoundException e) {
			e.printStackTrace();
			// A failed submission must not exit 0 (the original fell through
			// and returned success to the caller).
			System.exit(1);
		} catch (InterruptedException e) {
			// Restore the interrupt status so callers/frameworks can observe it.
			Thread.currentThread().interrupt();
			e.printStackTrace();
			System.exit(1);
		}
	}

}
