package com.itbaizhan.hbase2hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;


/**
 * Driver for an HBase-to-HBase MapReduce job: reads the {@code cf:line} column
 * from the {@code sentence} table, runs a word-count style Mapper/Reducer pair,
 * and writes the results into the {@code wordcount} table.
 *
 * <p>Runs with the local MapReduce framework and connects to the ZooKeeper
 * quorum {@code node02,node03,node04} for HBase access.
 */
public class Hbase2HbaseMain {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration(true);
        // Use the local runner (no cluster submission) for this demo job.
        conf.set("mapreduce.framework.name", "local");
        // ZooKeeper quorum used by the HBase client to locate the cluster.
        conf.set("hbase.zookeeper.quorum", "node02,node03,node04");
        Job job = Job.getInstance(conf, "hbase2hbase demo");
        // Entry-point class, used to locate the job jar.
        job.setJarByClass(Hbase2HbaseMain.class);
        // Scan definition for the source table: only fetch the cf:line column.
        Scan scan = new Scan();
        scan.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("line"));
        TableMapReduceUtil.initTableMapperJob(
                "sentence",                    // source table to scan
                scan,                          // scan (restricted to cf:line)
                Hbase2HbaseMapper.class,       // Mapper implementation
                Text.class, IntWritable.class, // mapper output key/value types
                job,                           // job to configure
                false);                        // don't add HBase jars to classpath
        // Write the reduced results into the target HBase table.
        TableMapReduceUtil.initTableReducerJob(
                "wordcount",                   // destination table
                Hbase2HbaseReducer.class,      // Reducer implementation
                job,                           // job to configure
                null, null, null, null,        // no custom partitioner/quorum/impl
                false                          // don't add HBase jars to classpath
        );
        // Propagate job success/failure via the process exit code so callers
        // (scripts, schedulers) can detect a failed run.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
