package com.niit.Hbase.woedcount;


import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapreduce.Job;

import org.apache.hadoop.io.Text;
import java.io.IOException;

/**
 * @author Ys
 * @date 2022-08-10 14:36
 * @desc HBase MapReduce word-count driver: scans the BD2:word table and writes word counts to BD2:result
 */
public class WordCountDriver {

    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        // HBase configuration: load defaults from the classpath, then point the client at ZooKeeper
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "node1");

        Job job = Job.getInstance(conf);
        job.setJarByClass(WordCountDriver.class);

        // Scan the source table first: the rows of BD2:word are the input records for the job
        Scan scan = new Scan();
        // Read only the content:info column (column family, qualifier)
        scan.addColumn(Bytes.toBytes("content"), Bytes.toBytes("info"));
        // Set the input table, the scan, the Mapper class, and the Mapper's output key/value types
        TableMapReduceUtil.initTableMapperJob("BD2:word", scan, WordCountMapper.class, Text.class, IntWritable.class, job);
        // Set the output table and the Reducer class (the reducer emits Put/Delete mutations for BD2:result)
        TableMapReduceUtil.initTableReducerJob("BD2:result", WordCountReduce.class, job);
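        // Note: initTableReducerJob does not create BD2:result; the table (and the column family the
        // reducer writes to) is expected to exist before the job runs.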

        boolean resultFlag = job.waitForCompletion(true);
        // Exit with 0 on success, 1 on failure
        System.exit(resultFlag ? 0 : 1);
    }
}
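
// A minimal sketch of what the WordCountMapper and WordCountReduce classes referenced above might
// look like, kept inside a block comment so this file still compiles on its own (the real classes
// live elsewhere in this package). Assumptions not taken from this driver: each row of BD2:word
// stores a line of text in content:info, words are split on whitespace, and the reducer writes the
// total as a string to the result:info column of BD2:result. The sketch would additionally need
// imports for TableMapper, TableReducer, ImmutableBytesWritable, Result, and Put.
/*
class WordCountMapper extends TableMapper<Text, IntWritable> {
    private static final IntWritable ONE = new IntWritable(1);

    @Override
    protected void map(ImmutableBytesWritable rowKey, Result row, Context context)
            throws IOException, InterruptedException {
        // Read the line of text stored in content:info and emit a <word, 1> pair per word
        String line = Bytes.toString(row.getValue(Bytes.toBytes("content"), Bytes.toBytes("info")));
        if (line == null) {
            return;
        }
        for (String word : line.split("\\s+")) {
            if (!word.isEmpty()) {
                context.write(new Text(word), ONE);
            }
        }
    }
}

class WordCountReduce extends TableReducer<Text, IntWritable, ImmutableBytesWritable> {
    @Override
    protected void reduce(Text word, Iterable<IntWritable> counts, Context context)
            throws IOException, InterruptedException {
        // Sum the partial counts for this word
        int sum = 0;
        for (IntWritable count : counts) {
            sum += count.get();
        }
        // Use the word as the row key and store the total in the (assumed) result:info column
        Put put = new Put(Bytes.toBytes(word.toString()));
        put.addColumn(Bytes.toBytes("result"), Bytes.toBytes("info"), Bytes.toBytes(String.valueOf(sum)));
        context.write(new ImmutableBytesWritable(Bytes.toBytes(word.toString())), put);
    }
}
*/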
