package org.robby.hbase.cdr;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapreduce.Job;

/**
 * Counts the number of CDR (call detail record) rows per day.
 * MapReduce job: scans the raw CDR table and writes one row per day,
 * containing the record count, into the daily summary table.
 */
public class DailyCdr {
	/** Source table holding the raw CDR rows. */
	static String tab_cdr_name = "tab_cdr";
	/** Destination table receiving the per-day record counts. */
	static String tab_cdr_daily_name = "tab_cdr_daily";

	/**
	 * Emits (day, 1) for every CDR row scanned from the source table.
	 * The output key is the first 8 characters of the record's timestamp
	 * (presumably yyyyMMdd — TODO confirm against CdrPro.SmCdr's producer).
	 */
	static class Mapper extends TableMapper<ImmutableBytesWritable, IntWritable> {
		private int numRecords = 0;
		private static final IntWritable one = new IntWritable(1);

		@Override
		protected void map(ImmutableBytesWritable row, Result values, Context context) throws IOException, InterruptedException {
			KeyValue kv = values.getColumnLatest(Bytes.toBytes("data"), null);
			if (kv == null) {
				// Row has no "data" cell; nothing to count. Without this
				// guard the original code threw a NullPointerException.
				return;
			}
			CdrPro.SmCdr cdr = CdrPro.SmCdr.parseFrom(kv.getValue());
			String ts = cdr.getTimestamp();
			// Group by day: first 8 characters of the timestamp. Bytes.toBytes
			// always encodes as UTF-8, unlike String.getBytes() which depends
			// on the platform default charset.
			ImmutableBytesWritable userkey = new ImmutableBytesWritable(Bytes.toBytes(ts.substring(0, 8)));
			// Let IOException/InterruptedException propagate so the framework
			// fails the task instead of silently dropping records (the
			// original caught Exception and only printed the stack trace).
			context.write(userkey, one);
			numRecords++;
			if (numRecords % 1000 == 0) {
				context.setStatus("mapper processed " + numRecords + " records so far.");
			}
		}
	}

	/**
	 * Sums the 1s emitted by the mapper and writes one row per day into the
	 * daily table: rowkey = day string, family "data", value = count encoded
	 * as a 4-byte big-endian int by Bytes.toBytes(int).
	 */
	public static class Reducer extends TableReducer<ImmutableBytesWritable, IntWritable, ImmutableBytesWritable> {

		@Override
		// Note: use the inherited Context type, not the fully-qualified
		// org.apache.hadoop.mapreduce.Reducer.Context.
		protected void reduce(ImmutableBytesWritable key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
			int sum = 0;
			for (IntWritable val : values) {
				sum += val.get();
			}
			Put put = new Put(key.get());
			// Qualifier is intentionally null: the count lives directly
			// under the "data" family, matching the mapper's read.
			put.add(Bytes.toBytes("data"), null, Bytes.toBytes(sum));
			// Propagate IOException: the method already declares it, and a
			// lost write must fail the task rather than be logged and ignored.
			context.write(key, put);
		}
	}

	/**
	 * (Re)creates the daily output table: drops it if it already exists, then
	 * creates it with a single column family "data" keeping one version.
	 *
	 * @param config HBase configuration pointing at the target cluster
	 * @throws Exception if any of the admin operations fail
	 */
	public static void createTable(Configuration config) throws Exception {
		HBaseAdmin admin = new HBaseAdmin(config);
		try {
			if (admin.tableExists(tab_cdr_daily_name)) {
				admin.disableTable(tab_cdr_daily_name);
				admin.deleteTable(tab_cdr_daily_name);
			}
			HTableDescriptor tableDes = new HTableDescriptor(tab_cdr_daily_name);
			HColumnDescriptor family = new HColumnDescriptor("data");
			family.setMaxVersions(1);
			tableDes.addFamily(family);
			admin.createTable(tableDes);
		} finally {
			// Always release the admin connection, even when a table
			// operation throws (the original leaked it on failure).
			admin.close();
		}
	}

	/**
	 * Job driver: rebuilds the daily table, then runs a scan-map-reduce over
	 * the CDR table writing per-day counts. Exits 0 on success, 1 on failure.
	 */
	public static void main(String[] args) throws Exception {
		Configuration conf = HBaseConfiguration.create();
		conf.set("hbase.zookeeper.quorum", "hadoop.main");
		createTable(conf);
		Job job = new Job(conf, "daily_report");
		job.setJarByClass(DailyCdr.class);

		Scan scan = new Scan();
		// Recommended scan settings for MapReduce jobs (HBase reference
		// guide): fetch rows in batches and keep the scan out of the
		// region servers' block cache.
		scan.setCaching(500);
		scan.setCacheBlocks(false);
		TableMapReduceUtil.initTableMapperJob(tab_cdr_name, scan, Mapper.class, ImmutableBytesWritable.class, IntWritable.class, job);
		TableMapReduceUtil.initTableReducerJob(tab_cdr_daily_name, Reducer.class, job);
		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}
}
