package cn.lsh.main;

import cn.lsh.mapper.HbaseMaxMapper;
import cn.lsh.reducer.HbaseMaxReduce;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;

/**
 * MapReduce driver for the "hbase_max" job: scans the {@code cf:count} column
 * of the HBase table "total" with {@link HbaseMaxMapper}, then writes the
 * reduced result back into the same "total" table via {@link HbaseMaxReduce}.
 */
public class HbaseMaxDriver {

	/**
	 * Configures and submits the job, blocking until it finishes.
	 * Exits the JVM with status 0 on success and 1 on job failure, so
	 * shell scripts and schedulers can detect failed runs.
	 *
	 * @param args unused command-line arguments
	 * @throws Exception if job setup or submission fails
	 */
	public static void main(String[] args) throws Exception {
		// NOTE(review): new Configuration(true) loads only core-/hdfs-/mapred-site.xml,
		// not hbase-site.xml; HBaseConfiguration.create() is the usual choice for
		// HBase jobs — confirm how the ZooKeeper quorum is being picked up here.
		Configuration conf = new Configuration(true);

		Job job = Job.getInstance(conf, "hbase_max");
		job.setJarByClass(HbaseMaxDriver.class);

		// Restrict the scan to the single column the mapper consumes.
		Scan scan = new Scan();
		scan.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("count"));

		// Configure the mapper to read rows from the HBase table "total".
		// Last arg (addDependencyJars=false) assumes a fat/provided classpath.
		TableMapReduceUtil.initTableMapperJob("total", scan, HbaseMaxMapper.class, Text.class, Text.class, job, false);
		// Configure the reducer to write its output back into the "total" table.
		TableMapReduceUtil.initTableReducerJob("total", HbaseMaxReduce.class, job);

		// Propagate the job outcome as the process exit code instead of
		// discarding it (the original always exited 0, even on failure).
		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}
}
