package com.pxene.hbasetime;

import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;

/**
 * MapReduce driver that scans the HBase table {@code t_prod_weixin_art}
 * (column family {@code info}) with {@link HBaseTimeCountMapper} and
 * aggregates the counts with {@link HBaseTimeCountReducer}, writing the
 * result to an HDFS output directory.
 */
public class TimeCountHBaseTask extends Configured implements Tool {

    /** HDFS directory the reducers write to; deleted first if it already exists. */
    private static final String OUTPUT_PATH = "/user/chenjinghui/hbasecount/";
    /** Source HBase table name. */
    private static final String TABLE_NAME = "t_prod_weixin_art";
    /** Column family the scan is restricted to. */
    private static final String COLUMN_FAMILY = "info";

    /**
     * Configures and submits the counting job.
     *
     * @param args unused command-line arguments
     * @return 0 on successful job completion, 1 otherwise
     * @throws Exception if job setup, HDFS access, or job submission fails
     */
    @Override
    public int run(String[] args) throws Exception {
        Configuration conf = getConf();
        conf.set("hbase.zookeeper.property.clientPort", "2181");
        conf.set("hbase.zookeeper.quorum", "dmp01,dmp02,dmp03,dmp04,dmp05");

        // Remove a stale output directory up front; MapReduce fails fast if it exists.
        // Note: do NOT close this FileSystem — instances are cached and shared.
        Path outputPath = new Path(OUTPUT_PATH);
        FileSystem fs = FileSystem.get(conf);
        if (fs.exists(outputPath)) {
            fs.delete(outputPath, true);
        }

        Job job = Job.getInstance(conf, "count HBase");
        job.setJarByClass(TimeCountHBaseTask.class);

        // Reading an HBase table requires a Scan restricted to the column family.
        // Use an explicit charset: platform-default getBytes() is non-portable
        // (UTF-8 matches HBase's Bytes.toBytes behavior).
        Scan scan = new Scan();
        scan.addFamily(COLUMN_FAMILY.getBytes(StandardCharsets.UTF_8));
        // initTableMapperJob also wires up the map output key/value classes.
        TableMapReduceUtil.initTableMapperJob(TABLE_NAME, scan, HBaseTimeCountMapper.class,
                LongWritable.class, LongWritable.class, job);

        job.setNumReduceTasks(5);
        job.setReducerClass(HBaseTimeCountReducer.class);

        // Final (reduce) output types.
        job.setOutputKeyClass(LongWritable.class);
        job.setOutputValueClass(LongWritable.class);
        FileOutputFormat.setOutputPath(job, outputPath);

        return job.waitForCompletion(true) ? 0 : 1;
    }

}