package com.leo.hbase.mapreduce;

import org.apache.commons.cli.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.util.GenericOptionsParser;

import java.io.IOException;

/**
 * @author leo.jie (weixiao.me@aliyun.com)
 * @version 1.0
 * @organization CC
 * @website https://www.jlpyyf.com
 * @date 2019-11-03 17:28
 * @since 1.0
 */
public class RowCounter {
    private static final Log LOG = LogFactory.getLog(RowCounter.class);
    static final String NAME = "RowCounter";
    private static final String DEFAULT_SAVE_COUNT_TABLE_NAME = "hbase_business_row_count_table";
    private static final int DEFAULT_SCAN_CACHE = 5000;
    private static final String FAMILY_NAME = "info";
    private static final String COL_NAME = "count";
    private static final String ROW_COUNTER_TABLE_NAME_KEY = "RowCountTableName";

    /**
     * Mapper: emits a single ({@code count}, 1) pair per scanned row.
     * All rows share the same key so the (single) reducer receives one group.
     */
    public static class RowCountMapper extends TableMapper<Text, LongWritable> {
        // Bytes.toBytes is always UTF-8; String.getBytes() would depend on the
        // platform default charset of whichever node runs the task.
        static final byte[] colNameByte = Bytes.toBytes(COL_NAME);
        private final Text t = new Text();
        private final LongWritable iWrite = new LongWritable(1L);

        @Override
        protected void map(ImmutableBytesWritable key, Result value,
                           Context context) throws IOException, InterruptedException {
            t.set(colNameByte);
            context.write(t, iWrite);
        }
    }

    /**
     * Reducer: sums the per-row 1s and writes the total into the output table
     * as a single cell {@code info:count}, row-keyed by the counted table's name
     * (read from the job configuration under {@link #ROW_COUNTER_TABLE_NAME_KEY}).
     */
    public static class RowCountReducer extends TableReducer<Text, LongWritable, NullWritable> {
        static final byte[] familyByte = Bytes.toBytes(FAMILY_NAME);
        static final byte[] colNameByte = Bytes.toBytes(COL_NAME);

        @Override
        protected void reduce(Text key, Iterable<LongWritable> values,
                              Context context) throws IOException, InterruptedException {
            long count = 0;
            for (LongWritable value : values) {
                count += value.get();
            }
            String table = context.getConfiguration().get(ROW_COUNTER_TABLE_NAME_KEY);
            Put put = new Put(Bytes.toBytes(table));
            put.addColumn(familyByte, colNameByte, Bytes.toBytes(count));
            context.write(NullWritable.get(), put);
        }
    }

    /**
     * Combiner: pre-aggregates the per-row 1s on the map side so the single
     * reducer receives one partial sum per map task instead of one record per row.
     */
    public static class RowCountCombiner extends Reducer<Text, LongWritable, Text, LongWritable> {

        @Override
        protected void reduce(Text key, Iterable<LongWritable> values, Context context)
                throws IOException, InterruptedException {
            long i = 0;
            for (LongWritable val : values) {
                i += val.get();
            }
            context.write(key, new LongWritable(i));
        }
    }

    /**
     * Parses command-line options: -t/--table (required source table),
     * -o/--outputTable (optional result table), -s/--scanCache (optional scan cache size).
     * Prints usage and exits the JVM on a parse error.
     *
     * @param args remaining args after {@code GenericOptionsParser}
     * @return the parsed command line (never {@code null}; exits on failure)
     */
    private static CommandLine parseArgs(String[] args) {
        Options options = new Options();
        Option o = new Option("t", "table", true, "需要统计统计的表名！");
        o.setArgName("table-name");
        o.setRequired(true);
        options.addOption(o);

        o = new Option("o", "outputTable", true,
                "需要保存统计行数的表名，不输入默认保存在:" + DEFAULT_SAVE_COUNT_TABLE_NAME + "中！");
        o.setArgName("output-table");
        o.setRequired(false);
        options.addOption(o);

        o = new Option("s", "scanCache", true, "scan缓存大小，不设置默认为：" + DEFAULT_SCAN_CACHE);
        o.setArgName("scan-cache");
        o.setRequired(false);
        options.addOption(o);

        // DefaultParser replaces the deprecated PosixParser (commons-cli 1.3+).
        CommandLineParser parser = new DefaultParser();
        CommandLine cmd = null;
        try {
            cmd = parser.parse(options, args);
        } catch (Exception e) {
            LOG.error("LEO TIP ERROR: " + e.getMessage() + "\n");
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp(NAME + " ", options, true);
            System.exit(-1);
        }
        return cmd;
    }

    /**
     * Job driver: configures and submits the row-count job, then exits with
     * 0 on success and 1 on failure.
     *
     * @param args Hadoop generic options followed by the tool options parsed
     *             by {@link #parseArgs(String[])}
     */
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
        CommandLine cmd = parseArgs(otherArgs);
        String tableName = cmd.getOptionValue("t");
        // Pass the source table name to the reducer, which uses it as the result row key.
        conf.set(ROW_COUNTER_TABLE_NAME_KEY, tableName);
        String outputTableName = DEFAULT_SAVE_COUNT_TABLE_NAME;
        if (cmd.hasOption("o")) {
            outputTableName = cmd.getOptionValue("o");
        }
        int scanCache = DEFAULT_SCAN_CACHE;
        if (cmd.hasOption("s")) {
            scanCache = Integer.parseInt(cmd.getOptionValue("s"));
        }

        LOG.info("当前正在统计的表: " + tableName);
        LOG.info("统计结果输出在: " + outputTableName);
        LOG.info("scan缓存的行数: " + scanCache);

        Job job = Job.getInstance(conf, RowCounter.class.getName() + "-" + tableName);
        job.setJarByClass(RowCounter.class);
        // Mapper/reducer classes are set by initTableMapperJob/initTableReducerJob
        // below; only the combiner and reducer count need to be set explicitly.
        job.setCombinerClass(RowCountCombiner.class);
        job.setNumReduceTasks(1);

        Scan scan = new Scan();
        scan.setCaching(scanCache);
        scan.setCacheBlocks(false);
        // Row counting only needs row keys: fetching just the first KeyValue of
        // each row avoids transferring every column (same trick as HBase's own
        // org.apache.hadoop.hbase.mapreduce.RowCounter).
        scan.setFilter(new FirstKeyOnlyFilter());

        TableMapReduceUtil.initTableMapperJob(tableName, scan, RowCountMapper.class, Text.class, LongWritable.class, job);
        TableMapReduceUtil.initTableReducerJob(outputTableName, RowCountReducer.class, job);
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

