package com.pxene.hbase2hbase;

import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.Tool;

/**
 * MapReduce driver that copies rows from the HBase table
 * {@code t_text_categorization} (column family {@code article}) into
 * {@code t_text_categorization_v2}, using {@code Hbase2HBaseMapper} and
 * {@code Hbase2HbaseReducer}.
 */
public class Hbase2HbaseTask extends Configured implements Tool {

    /**
     * Configures and submits the HBase-to-HBase copy job, blocking until it finishes.
     *
     * @param args command-line arguments (currently unused)
     * @return 0 if the job completed successfully, 1 otherwise
     * @throws Exception if job configuration or execution fails
     */
    @Override
    public int run(String[] args) throws Exception {
        Configuration conf = getConf();
        // ZooKeeper quorum for the source/target HBase cluster.
        conf.set("hbase.zookeeper.property.clientPort", "2181");
        conf.set("hbase.zookeeper.quorum", "dmp01,dmp02,dmp03,dmp04,dmp05");

        Job job = Job.getInstance(conf, "Hbase2Hbase");
        job.setJarByClass(Hbase2HbaseTask.class);

        // To read an HBase table, create a Scan and restrict it to the column
        // family we need. Use an explicit charset: the no-arg String.getBytes()
        // depends on the JVM's platform default and is not portable.
        Scan scan = new Scan();
        scan.addFamily("article".getBytes(StandardCharsets.UTF_8));
        TableMapReduceUtil.initTableMapperJob("t_text_categorization", scan, Hbase2HBaseMapper.class,
                Text.class, Text.class, job);

        // initTableReducerJob wires up both the reducer class and the
        // TableOutputFormat, so no separate job.setReducerClass(...) is needed.
        TableMapReduceUtil.initTableReducerJob("t_text_categorization_v2", Hbase2HbaseReducer.class, job);

        // NOTE(review): initTableReducerJob sets the output key class to
        // ImmutableBytesWritable; overriding it with Text here looks suspicious —
        // confirm against Hbase2HbaseReducer's declared output types.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        // Single reducer: simple, serialized write path at the cost of parallelism.
        job.setNumReduceTasks(1);

        return job.waitForCompletion(true) ? 0 : 1;
    }

}