package org.weishe.hbase.department;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * MapReduce job that counts the rows of the HBase table {@code t_department}.
 *
 * <p>Each mapper emits {@code ("count", 1)} once per scanned row; the reducer
 * (also used as a combiner) sums the ones. The result is written to an HDFS
 * text file under the configured output path.
 */
public class Count {
	static String tableName = "t_department";
	// Reused per-mapper output key/value to avoid allocating one pair per row.
	static Text okey = new Text("count");
	static IntWritable ovalue = new IntWritable(1);

	/**
	 * Emits ("count", 1) for every row of the scanned HBase table.
	 * Row key and cell contents are ignored — only row existence matters.
	 */
	public static class CountMapper extends TableMapper<Text, IntWritable> {
		@Override
		protected void map(ImmutableBytesWritable key, Result value, Context context)
				throws IOException, InterruptedException {
			context.write(okey, ovalue);
		}

	}

	/**
	 * Sums the 1-counts emitted by {@link CountMapper}.
	 *
	 * <p>Also registered as the job's combiner, so it may run several times on
	 * partial data within a single task. The running sum therefore MUST be a
	 * local variable: Hadoop reuses one Reducer instance for every
	 * {@code reduce()} invocation in a task, and an instance field would carry
	 * stale totals across invocations, inflating the final count.
	 */
	public static class CountReducer extends Reducer<Text, IntWritable, Text, IntWritable> {

		@Override
		protected void reduce(Text key, Iterable<IntWritable> values,
				Reducer<Text, IntWritable, Text, IntWritable>.Context context)
				throws IOException, InterruptedException {
			// Local accumulator — see class Javadoc for why this must not be a field.
			int sum = 0;
			for (IntWritable count : values) {
				sum += count.get();
			}
			// Re-emit under the incoming key ("count") rather than allocating a new Text.
			context.write(key, new IntWritable(sum));
		}

	}

	/**
	 * Configures and runs the counting job against the HBase cluster named in
	 * the ZooKeeper quorum below.
	 *
	 * @param args unused
	 * @throws Exception if job setup fails or the job does not complete successfully
	 */
	public static void main(String[] args) throws Exception {
		Configuration config = HBaseConfiguration.create();
		// Job jar shipped to the cluster; adjust this path for your environment.
		config.set("mapred.jar", "/Users/chenbiao/Desktop/hadoop.jar");
		String zk_list = "hadoop,hadoop11,hadoop12";
		config.set("hbase.zookeeper.quorum", zk_list);
		Job job = Job.getInstance(config);
		job.setJobName("count");
		job.setJarByClass(Count.class); // class that contains mapper

		Scan scan = new Scan();
		// 1 is Scan's default caching, which performs badly for MapReduce —
		// batch 500 rows per RPC instead.
		scan.setCaching(500);
		// Scanned blocks would evict useful entries from the region server's
		// block cache; always disable for full-table MR scans.
		scan.setCacheBlocks(false);

		TableMapReduceUtil.initTableMapperJob(tableName, // input HBase table name
				scan, // Scan instance to control CF and attribute selection
				CountMapper.class, // mapper
				Text.class, // mapper output key
				IntWritable.class, // mapper output value
				job);
		// CountReducer is safe as a combiner because it keeps no state between calls.
		job.setCombinerClass(CountReducer.class);
		job.setReducerClass(CountReducer.class);

		// NOTE(review): the job fails if this path already exists on HDFS —
		// either pick a fresh path or delete the old one beforehand.
		String outputPath = "/ouput/hbase/count5";
		FileOutputFormat.setOutputPath(job, new Path(outputPath));

		boolean succeeded = job.waitForCompletion(true);
		if (!succeeded) {
			throw new IOException("error with job!");
		}
	}

}
