package mrunit;

import com.ailk.oci.ocnosql.common.rowkeygenerator.GenRKCallBackHBImpl;
import com.ailk.oci.ocnosql.common.rowkeygenerator.MD5RowKeyGenerator;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2;
import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
 * Bulk-load driver: parses 37-field CSV call records into HBase {@link org.apache.hadoop.hbase.client.Put}s,
 * writes them as HFiles via {@link org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2}, then bulk-loads
 * the generated files into an existing HBase table.
 *
 * Created by zhaopan on 2017/6/20.
 */
public class copy
{

    private static class WordCountMapper extends Mapper<LongWritable, Text, ImmutableBytesWritable, Put>
    {
        /** Number of comma-separated fields expected per input line; other lines are skipped. */
        private static final int EXPECTED_FIELD_COUNT = 37;

        /** Single column family ("0") shared by every cell; built once instead of per record. */
        private static final byte[] FAMILY = Bytes.toBytes("0");

        /** Column qualifiers, in the same order as the CSV fields (index i maps to wordArray[i]). */
        private final static List<String> columns = new ArrayList<String>();

        static
        {
            columns.add("BTIME");
            columns.add("ETIME");
            columns.add("PROTOCOLID");
            columns.add("EVENTID");
            columns.add("SPCKIND");
            columns.add("MSCCODE");
            columns.add("BSCCODE");
            columns.add("LAC");
            columns.add("CI");
            columns.add("OLAC");
            columns.add("OCI");
            columns.add("DLAC");
            columns.add("DCI");
            columns.add("FIRSTLAC");
            columns.add("FIRSTCI");
            columns.add("LASTLAC");
            columns.add("LASTCI");
            columns.add("DRNCID");
            columns.add("CALLINGNUM");
            columns.add("CALLEDNUM");
            columns.add("CALLINGIMSI");
            columns.add("CALLEDIMSI");
            columns.add("CALLINGIMEI");
            columns.add("CALLEDIMEI");
            columns.add("EVENTRESULT");
            columns.add("HANDOUTREQUIRECAUSE");
            columns.add("HANDOUTOFFSET");
            columns.add("HANDOUTRSPOFFSET");
            columns.add("HANDINOFFSET");
            columns.add("HANDINRSPOFFSET");
            columns.add("HANDOUTSTATUS");
            columns.add("HANDINSTATUS");
            columns.add("LUSTATUS");
            columns.add("HOFLAG");
            columns.add("ALERT_TIME");
            columns.add("CONN_TIME");
            columns.add("HANDOUTSTATUS".equals("") ? "" : "CALL_DUR"); // see note below
        }

        /**
         * Parses one CSV call record into a single HBase {@link Put}.
         *
         * <p>The row key is produced by the project row-key generator from
         * {@code md5(phone) + phone + beginTime}; each CSV field becomes one cell
         * under family {@code "0"} with the qualifier from {@link #columns}.
         * Malformed lines (field count != 37) are silently skipped rather than
         * failing the task.
         *
         * @param key     byte offset of the line within the input split (unused)
         * @param value   one CSV line
         * @param context MapReduce context receiving (rowKey, Put) pairs
         */
        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException
        {
            String[] wordArray = value.toString().split(",");
            // Guard clause: skip malformed records instead of indexing out of bounds.
            if (wordArray.length != EXPECTED_FIELD_COUNT)
            {
                return;
            }
            String phone = wordArray[18];     // CALLINGNUM position
            String beginTime = wordArray[0];  // BTIME position
            String md5 = new MD5RowKeyGenerator().generatePrefix(phone).toString();
            // NOTE(review): the "" separators are no-ops; the key is md5+phone+beginTime
            // concatenated directly. Kept byte-identical so existing row keys match.
            String rowKey = new GenRKCallBackHBImpl().callback(md5 + "" + phone + "" + beginTime, value.toString(), null);
            byte[] rowKeyBytes = Bytes.toBytes(rowKey);
            Put put = new Put(rowKeyBytes);
            for (int i = 0; i < columns.size(); i++)
            {
                put.addColumn(FAMILY, Bytes.toBytes(columns.get(i)), Bytes.toBytes(wordArray[i]));
            }
            context.write(new ImmutableBytesWritable(rowKeyBytes), put);
        }
    }
    public static class HFileLoader {
        /**
         * Bulk-loads previously generated HFiles into an existing HBase table.
         *
         * <p>Best-effort: any failure is printed but not propagated, matching the
         * original contract relied on by {@code main}.
         *
         * @param pathToHFile   HDFS directory containing the HFile job output
         * @param tableName     name of the target table (must already exist)
         * @param configuration HBase-aware configuration used to connect
         */
        public static void doBulkLoad(String pathToHFile, String tableName, Configuration configuration){
            try {
                LoadIncrementalHFiles loader = new LoadIncrementalHFiles(configuration);
                // try-with-resources: the original never closed the HTable, leaking
                // its underlying connection on every call.
                try (HTable hTable = new HTable(configuration, tableName)) {
                    loader.doBulkLoad(new Path(pathToHFile), hTable); // move HFiles into region dirs
                }
                System.out.println("Bulk Load Completed..");
            } catch (Exception exception) {
                // Intentionally swallowed (best-effort); failure is visible on stderr only.
                exception.printStackTrace();
            }
        }
    }
    /**
     * Entry point: runs the HFile-generation job, then bulk-loads its output.
     *
     * <p>Expected arguments (after {@link GenericOptionsParser} strips generic
     * options): {@code <inputPath> <hfileOutputPath> <tableName>}.
     *
     * <p>Exit codes follow shell convention: 0 on success, non-zero on failure.
     * (The original exited 1 on success and 0 on failure — inverted.)
     */
    public static void main(String[] args) throws Exception
    {
        // Kerberos + cluster connection settings for the target HBase instance.
        Configuration hadoopConfiguration = new Configuration();
        hadoopConfiguration.set("hbase.security.authentication", "kerberos");
        hadoopConfiguration.set("hbase.zookeeper.quorum", "hbbdch-dn-01:2181,hbbdch-nn-01:2181,hbbdch-nn-02:2181");
        hadoopConfiguration.set("zookeeper.znode.parent", "/services/slider/users/client/hbase_1007_3");
        hadoopConfiguration.set("hbase.master.kerberos.principal", "client/_HOST@HBBDCH.COM");
        hadoopConfiguration.set("hbase.regionserver.kerberos.principal", "client/_HOST@HBBDCH.COM");
        hadoopConfiguration.setInt("hbase.mapreduce.bulkload.max.hfiles.perRegion.perFamily", 5000);
        hadoopConfiguration.set("hbase.fs.tmp.dir", "/tmp/hbase-staging");

        String[] dfsArgs = new GenericOptionsParser(hadoopConfiguration, args).getRemainingArgs();
        // Validate BEFORE indexing: the original printed dfsArgs[0..2] first and threw
        // ArrayIndexOutOfBoundsException when fewer than 3 arguments were supplied.
        if (dfsArgs.length != 3) {
            System.out.println("Usage: <inputPath> <hfileOutputPath> <tableName>");
            System.exit(-1);
        }
        System.out.println(dfsArgs[0]);
        System.out.println(dfsArgs[1]);
        System.out.println(dfsArgs[2]);

        Job convertWordCountJobOutputToHFileJob = Job.getInstance(hadoopConfiguration, "wordCount_bulkload");
        convertWordCountJobOutputToHFileJob.setJarByClass(copy.class);
        convertWordCountJobOutputToHFileJob.setMapperClass(WordCountMapper.class);
        convertWordCountJobOutputToHFileJob.setInputFormatClass(TextInputFormat.class);
        convertWordCountJobOutputToHFileJob.setMapOutputKeyClass(ImmutableBytesWritable.class);
        convertWordCountJobOutputToHFileJob.setMapOutputValueClass(Put.class);
        FileInputFormat.addInputPaths(convertWordCountJobOutputToHFileJob, dfsArgs[0]);
        FileOutputFormat.setOutputPath(convertWordCountJobOutputToHFileJob, new Path(dfsArgs[1]));

        // HBase configuration layered on top of the Hadoop one.
        Configuration hbaseConfiguration = HBaseConfiguration.create(hadoopConfiguration);
        TableName tableName = TableName.valueOf(dfsArgs[2]);
        int exitCode;
        // try-with-resources: the original never closed the Connection.
        try (Connection connection = ConnectionFactory.createConnection(hbaseConfiguration)) {
            // Configures total-order partitioning against the table's current region boundaries.
            HFileOutputFormat2.configureIncrementalLoad(convertWordCountJobOutputToHFileJob,
                    connection.getTable(tableName), connection.getRegionLocator(tableName));
            exitCode = convertWordCountJobOutputToHFileJob.waitForCompletion(true) ? 0 : 1;
        }
        if (exitCode == 0) {
            // Bulk-load the generated HFiles into the target table.
            HFileLoader.doBulkLoad(dfsArgs[1], dfsArgs[2], hbaseConfiguration);
        }
        System.exit(exitCode);
    }
}