package com.apex.spark.sink;

import com.apex.spark.SparkEnvironment;
import com.apex.spark.batch.SparkBatchSink;
import com.apex.spark.stream.SparkStreamingSink;
import com.apex.spark.structuredstream.SparkStructuredStreamingSink;
import com.apex.spark.utils.ConfigKeyName;
import com.typesafe.config.Config;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.spark.JavaHBaseContext;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.types.StructField;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
 * HBase sink usable from Spark batch, DStream and Structured Streaming jobs.
 *
 * <p>All three output entry points delegate to {@link #hbaseWriter}, which bulk-puts the
 * dataset into the configured table via {@link JavaHBaseContext#bulkPut}. Supports both
 * plain and Kerberos-secured clusters, selected by the {@code HBASE_IS_KERBEROS} option.
 */
public class HbaseSink implements SparkBatchSink, SparkStreamingSink, SparkStructuredStreamingSink {

    private static final Logger logger = LoggerFactory.getLogger(HbaseSink.class);

    private Config config;
    private boolean isKerberos;
    private String krbPath;
    private String jaasFile;
    private String principal;
    private String keytab;
    private String zookeeperQuorum;
    private String masterPrincipal;
    private String regionserverPrincipal;
    private String znodeParent;
    private String confPath;
    // Static on purpose: PutFunction snapshots these at construction time on the driver
    // (see PutFunction's field initializers) before being serialized to the executors.
    // NOTE(review): this breaks if two HbaseSink instances with different configs run in
    // the same JVM — consider passing them through PutFunction's constructor instead.
    private static String rowKeyFields;
    private static String family;
    private String table;

    @Override
    public void outputBatch(SparkEnvironment env, Dataset<Row> dataset) {
        hbaseWriter(env, dataset);
    }

    @Override
    public void outputStreaming(SparkEnvironment env, Dataset<Row> dataset) {
        hbaseWriter(env, dataset);
    }

    @Override
    public void outputStructuredStreaming(SparkEnvironment env, Dataset<Row> dataset) {
        hbaseWriter(env, dataset);
    }

    /**
     * Reads the sink options out of {@link #config}. Must be called (with
     * {@link #setConfig} beforehand) before any of the output methods.
     *
     * <p>All connection options are optional here; the Kerberos credentials are only
     * read — and are mandatory — when {@code HBASE_IS_KERBEROS} is true.
     *
     * @param plugin the execution environment (unused; kept for the interface contract)
     */
    @Override
    public void prepare(SparkEnvironment plugin) {
        if (config.hasPath(ConfigKeyName.HBASE_ZOOKEEPER_QUORUM)) {
            zookeeperQuorum = config.getString(ConfigKeyName.HBASE_ZOOKEEPER_QUORUM);
        }
        if (config.hasPath(ConfigKeyName.HBASE_ZNODEPARENT)) {
            znodeParent = config.getString(ConfigKeyName.HBASE_ZNODEPARENT);
        }
        if (config.hasPath(ConfigKeyName.HBASE_CONF_PATH)) {
            confPath = config.getString(ConfigKeyName.HBASE_CONF_PATH);
        }
        if (config.hasPath(ConfigKeyName.HBASE_ROWKEY)) {
            rowKeyFields = config.getString(ConfigKeyName.HBASE_ROWKEY);
        }
        if (config.hasPath(ConfigKeyName.HBASE_FAMILY)) {
            family = config.getString(ConfigKeyName.HBASE_FAMILY);
        }
        if (config.hasPath(ConfigKeyName.HBASE_TABLE_NAME)) {
            table = config.getString(ConfigKeyName.HBASE_TABLE_NAME);
        }
        // Treat a missing kerberos flag as "not kerberized" instead of throwing,
        // consistent with how every other optional key above is handled.
        isKerberos = config.hasPath(ConfigKeyName.HBASE_IS_KERBEROS)
                && config.getBoolean(ConfigKeyName.HBASE_IS_KERBEROS);
        if (isKerberos) {
            masterPrincipal = config.getString(ConfigKeyName.HBASE_MASTERP_RINCIPAL);
            regionserverPrincipal = config.getString(ConfigKeyName.HBASE_REGIONSERVER_PRINCIPAL);
            jaasFile = config.getString(ConfigKeyName.HBASE_JAAS_FILE);
            krbPath = config.getString(ConfigKeyName.HBASE_KRB_PATH);
            keytab = config.getString(ConfigKeyName.HBASE_KEYTAB);
            principal = config.getString(ConfigKeyName.HBASE_USER_PRINCIPAL);
        }
    }

    @Override
    public Config getConfig() {
        return config;
    }

    @Override
    public void setConfig(Config config) {
        this.config = config;
    }

    /**
     * Bulk-writes the dataset into the configured HBase table.
     *
     * <p>The Kerberos and non-Kerberos paths differ only in how the Hadoop
     * {@link Configuration} is built; the bulk-put itself is identical.
     *
     * @param env     the Spark environment providing the SparkSession
     * @param dataset rows to write; each row becomes one {@link Put}
     */
    private void hbaseWriter(SparkEnvironment env, Dataset<Row> dataset) {
        JavaSparkContext jsc = new JavaSparkContext(env.getSparkSession().sparkContext());
        Configuration configuration = isKerberos ? getHbaseKerberosEnv() : getHbaseEnv();
        JavaHBaseContext hbaseContext = new JavaHBaseContext(jsc, configuration);
        hbaseContext.bulkPut(dataset.javaRDD(), TableName.valueOf(table), new PutFunction());
    }

    /**
     * Logs in to the KDC from the configured keytab and installs the login on the
     * current JVM's {@link UserGroupInformation}.
     *
     * <p>A failed login is logged but not rethrown, matching the best-effort error
     * handling of the calling configuration builders.
     *
     * @param conf the Hadoop configuration (must already carry the kerberos auth settings)
     */
    private void initKerberosENV(Configuration conf) {
        System.setProperty("java.security.krb5.conf", krbPath);
        System.setProperty("javax.security.auth.useSubjectCredsOnly", "false");
        // Enables verbose KDC tracing on stdout; useful when diagnosing login failures.
        System.setProperty("sun.security.krb5.debug", "true");
        System.setProperty("java.security.auth.login.config", jaasFile);
        try {
            UserGroupInformation.setConfiguration(conf);
            UserGroupInformation.loginUserFromKeytab(principal, keytab);
            logger.info("kerberos current user: {}", UserGroupInformation.getCurrentUser());
            logger.info("kerberos login user: {}", UserGroupInformation.getLoginUser());
        } catch (IOException e) {
            logger.error("kerberos login failed for principal {} with keytab {}", principal, keytab, e);
        }
    }

    /**
     * Builds a Kerberos-enabled HBase {@link Configuration} from the cluster config
     * files under {@link #confPath} plus the explicitly configured options, then
     * performs the KDC login.
     *
     * @return the configuration (possibly incomplete if setup failed; errors are logged)
     */
    private Configuration getHbaseKerberosEnv() {
        Configuration configuration = null;
        try {
            configuration = new Configuration();
            configuration.addResource(new Path(confPath + File.separator + "core-site.xml"));
            configuration.addResource(new Path(confPath + File.separator + "hdfs-site.xml"));
            configuration.addResource(new Path(confPath + File.separator + "hbase-site.xml"));
            // Configuration.set(key, null) throws; only override when actually configured.
            if (StringUtils.isNotBlank(zookeeperQuorum)) {
                configuration.set("hbase.zookeeper.quorum", zookeeperQuorum);
            }
            if (StringUtils.isNotBlank(znodeParent)) {
                configuration.set("zookeeper.znode.parent", znodeParent);
            }
            configuration.set("hbase.master.kerberos.principal", masterPrincipal);
            configuration.set("hbase.regionserver.kerberos.principal", regionserverPrincipal);
            configuration.set("hbase.security.authentication", "kerberos");
            configuration.set("hadoop.security.authentication", "kerberos");
            initKerberosENV(configuration);
        } catch (Exception e) {
            logger.error("failed to build kerberos hbase configuration from {}", confPath, e);
        }
        return configuration;
    }

    /**
     * Builds a plain (non-Kerberos) HBase {@link Configuration} from the cluster
     * config files under {@link #confPath} plus the explicitly configured options.
     *
     * @return the configuration (possibly incomplete if setup failed; errors are logged)
     */
    private Configuration getHbaseEnv() {
        Configuration configuration = null;
        try {
            configuration = new Configuration();
            configuration.addResource(new Path(confPath + File.separator + "core-site.xml"));
            configuration.addResource(new Path(confPath + File.separator + "hdfs-site.xml"));
            configuration.addResource(new Path(confPath + File.separator + "hbase-site.xml"));
            // Explicit set() overrides values from the resource files; skip nulls
            // because Configuration.set(key, null) throws.
            if (StringUtils.isNotBlank(zookeeperQuorum)) {
                configuration.set("hbase.zookeeper.quorum", zookeeperQuorum);
            }
            if (StringUtils.isNotBlank(znodeParent)) {
                configuration.set("zookeeper.znode.parent", znodeParent);
            }
        } catch (Exception e) {
            logger.error("failed to build hbase configuration from {}", confPath, e);
        }
        return configuration;
    }

    /**
     * Maps one Spark {@link Row} to one HBase {@link Put}.
     *
     * <p>The rowkey is the concatenation of the configured rowkey columns (or the
     * current epoch millis when none are configured); every column of the row is
     * written into the configured column family under its schema field name.
     */
    public static class PutFunction implements Function<Row, Put> {

        private static final long serialVersionUID = -21642L;

        // Snapshot of the static config, captured on the driver at construction time
        // and carried to the executors via serialization.
        String hbaseRowFields = rowKeyFields;
        String familyName = family;

        @Override
        public Put call(Row row) throws Exception {
            StringBuilder rowkey = new StringBuilder();
            if (StringUtils.isNotBlank(hbaseRowFields)) {
                // BUG FIX: the original only built the key when >= 2 fields were
                // configured, so a single-field rowkey produced an empty (illegal)
                // rowkey and Put(byte[]) threw. Iterate whatever is configured.
                for (String fieldName : hbaseRowFields.split(",")) {
                    Object value = row.getAs(fieldName);
                    // Null-safe: the original NPE'd on null rowkey columns.
                    rowkey.append(value == null ? "" : value.toString());
                }
            } else {
                rowkey.append(System.currentTimeMillis());
            }

            StructField[] fields = row.schema().fields();
            Put put = new Put(Bytes.toBytes(rowkey.toString()));
            // BUG FIX: the original flattened the row via mkString(",").split(","),
            // which misaligns any cell value containing a comma and drops trailing
            // empty cells (ArrayIndexOutOfBoundsException). Read each cell directly.
            for (int i = 0; i < fields.length; i++) {
                put.addColumn(Bytes.toBytes(familyName),
                        Bytes.toBytes(fields[i].name()),
                        Bytes.toBytes(String.valueOf(row.get(i))));
            }
            return put;
        }
    }
}
