package com.whoami.dataplatform.tag_combine.hbase.load;

import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2;
import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileAsBinaryInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import java.io.FileInputStream;
import java.util.Properties;


/**
 * MapReduce driver that converts sequence-file input into HFiles via
 * {@link HFileOutputFormat2} and then bulk-loads them into the
 * {@code test_tu} HBase table.
 *
 * <p>Expected configuration keys (set via {@code -D} or the properties file):
 * <ul>
 *   <li>{@code input} — input path containing the sequence files</li>
 *   <li>{@code hfile_output} — output directory for the generated HFiles</li>
 *   <li>{@code mr_properties} — optional: path to a properties file whose
 *       entries are copied into the MR job configuration</li>
 * </ul>
 */
@SuppressWarnings("Duplicates")
public class HFileWriterJob extends Configured implements Tool {
    /**
     * Config key pointing at a properties file for the MR framework;
     * the file's entries are set into the MR conf before the job starts.
     */
    static final String PARAMS_MR_PROP = "mr_properties";
    static final String TABLE_NAME = "test_tu";
    static final String CF_DEFAULT = "cf1";

    @Override
    public int run(String[] strings) throws Exception {
        Configuration conf = getConf();
        String mrProFile = conf.get(PARAMS_MR_PROP);
        if (StringUtils.isNotEmpty(mrProFile)) {
            Properties mrProperties = new Properties();
            // try-with-resources: the original leaked the FileInputStream.
            try (FileInputStream in = new FileInputStream(mrProFile)) {
                mrProperties.load(in);
            }
            // stringPropertyNames()/getProperty() is the Properties idiom;
            // keySet()/get() bypasses the defaults chain.
            for (String key : mrProperties.stringPropertyNames()) {
                conf.set(key, mrProperties.getProperty(key));
            }
        }
        conf = HBaseConfiguration.create(conf);

        String inputPath = conf.get("input");
        String output = conf.get("hfile_output");
        // Fail fast with a clear message instead of an NPE from new Path(null).
        if (StringUtils.isEmpty(inputPath) || StringUtils.isEmpty(output)) {
            System.err.println("Both 'input' and 'hfile_output' must be set in the configuration");
            return 1;
        }

        Job job = Job.getInstance(conf);
        job.setJarByClass(HFileWriterJob.class);

        job.setMapperClass(HFileWriterMapper.class);
        job.setMapOutputKeyClass(ImmutableBytesWritable.class);
        job.setMapOutputValueClass(Put.class);

        job.setInputFormatClass(SequenceFileAsBinaryInputFormat.class);
        FileInputFormat.addInputPath(job, new Path(inputPath));

        FileOutputFormat.setOutputPath(job, new Path(output));

        TableName tableName = TableName.valueOf(TABLE_NAME);
        // Close the connection even when the job fails — the original leaked it.
        try (Connection connection = ConnectionFactory.createConnection(conf)) {
            // Table and RegionLocator are Closeable too; only needed while
            // configuring total-order partitioning against the current regions.
            try (Table table = connection.getTable(tableName);
                 RegionLocator regionLocator = connection.getRegionLocator(tableName)) {
                HFileOutputFormat2.configureIncrementalLoad(job, table, regionLocator);
            }

            if (!job.waitForCompletion(true)) {
                return 1;
            }
            // Bulk-import the generated HFiles into HBase.
            doBulkLoad(output, TABLE_NAME, conf, connection);
            return 0;
        }
    }

    /**
     * Moves the HFiles under {@code pathToHFile} into the target table using
     * {@link LoadIncrementalHFiles}.
     *
     * @param pathToHFile directory containing the generated HFiles
     * @param tableName   name of the target HBase table
     * @param conf        configuration; HBase resources are added if absent
     * @param connection  open HBase connection (NOT closed by this method —
     *                    ownership stays with the caller)
     * @throws Exception if the bulk load fails
     */
    public static void doBulkLoad(String pathToHFile, String tableName, Configuration conf, Connection connection) throws Exception {
        HBaseConfiguration.addHbaseResources(conf);
        LoadIncrementalHFiles loadFiles = new LoadIncrementalHFiles(conf);

        System.out.println("Bulk Load Starting..");

        TableName table = TableName.valueOf(tableName);
        // Admin, Table, and RegionLocator are all Closeable — the original
        // leaked all three on every invocation.
        try (Admin admin = connection.getAdmin();
             Table hTable = connection.getTable(table);
             RegionLocator regionLocator = connection.getRegionLocator(table)) {
            loadFiles.doBulkLoad(new Path(pathToHFile), admin, hTable, regionLocator);
        }

        System.out.println("Bulk Load Completed..");
    }

    public static void main(String[] args) throws Exception {
        System.exit(ToolRunner.run(new Configuration(), new HFileWriterJob(), args));
    }
}
