package com.apex.spark.source;

import com.alibaba.fastjson.JSONObject;
import com.apex.spark.SparkEnvironment;
import com.apex.spark.batch.SparkBatchSource;
import com.apex.spark.utils.ConfigKeyName;
import com.typesafe.config.Config;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.spark.JavaHBaseContext;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.RuntimeConfig;
import org.apache.spark.sql.types.StructField;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Result;

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.*;

import org.apache.spark.api.java.function.Function;
import scala.Tuple2;

/**
 * Spark batch source that reads an HBase table (via "scan" or "get") and exposes
 * the rows as a {@code Dataset<Row>} registered as a temp view named after the table.
 *
 * <p>Rows are first converted to JSON strings (one key per column qualifier plus a
 * synthetic {@code "rowkey"} entry), then mapped onto the schema provided by
 * {@link SparkEnvironment#getStructType()}.
 *
 * <p>NOTE(review): several fields are {@code static} because the static
 * {@link #getData(String, JavaHBaseContext, TableName)} helper reads them; this makes
 * the class unsafe to use from multiple instances concurrently — confirm single-instance
 * usage with the plugin framework.
 */
public class HbaseSource implements SparkBatchSource {

    private static final Logger logger = LoggerFactory.getLogger(HbaseSource.class);

    /** Default scan caching (rows fetched per RPC) when none is configured. */
    private static final int DEFAULT_CACHE_SIZE = 10000;

    private Config config;
    private boolean isKerberos;
    private String krbPath;
    private String jaasFile;
    private String principal;
    private String keytab;
    private String zookeeperQuorum;
    private String masterPrincipal;
    private String regionserverPrincipal;
    private String znodeParent;
    private String confPath;
    private String table;
    private String method;
    // Static because they are consumed by the static getData(...) helper below.
    private static String rowData;
    private static int cacheSize;
    private static String startRow;
    private static String stopRow;
    private static JavaSparkContext jsc;

    /**
     * Reads the configured HBase table and returns it as a DataFrame.
     *
     * <p>Also registers the result as a temp view named after the table and prints a
     * sample via {@code dataset.show()} (debug visibility; remove if too noisy).
     *
     * @param environment Spark environment carrying the session and target schema
     * @return the loaded dataset
     */
    @Override
    public Dataset<Row> getData(SparkEnvironment environment) {
        jsc = new JavaSparkContext(environment.getSparkSession().sparkContext());
        // The two auth modes differ only in how the Hadoop Configuration is built;
        // everything downstream is identical (previously this logic was duplicated,
        // and the non-Kerberos branch ran a stray take(100) that triggered a useless job).
        Configuration configuration = isKerberos ? getHbaseKerberosEnv() : getHbaseEnv();
        JavaHBaseContext hbaseContext = new JavaHBaseContext(jsc, configuration);
        TableName tableName = TableName.valueOf(table);
        JavaRDD<String> javaRDD = getData(method, hbaseContext, tableName);
        JavaRDD<Row> javaRDDRow = changToRow(javaRDD, environment);
        Dataset<Row> dataset = environment.getSparkSession()
                .createDataFrame(javaRDDRow, environment.getStructType());
        dataset.createOrReplaceTempView(table);
        dataset.show();
        return dataset;
    }

    /**
     * Parses the plugin configuration into fields.
     *
     * <p>Defaults: {@code cacheSize} falls back to {@value #DEFAULT_CACHE_SIZE},
     * {@code method} falls back to {@code "scan"}. Kerberos-related keys are only
     * required when {@code HBASE_IS_KERBEROS} is true.
     */
    @Override
    public void prepare(SparkEnvironment plugin) {
        if (config.hasPath(ConfigKeyName.HBASE_ZOOKEEPER_QUORUM)) {
            zookeeperQuorum = config.getString(ConfigKeyName.HBASE_ZOOKEEPER_QUORUM);
        }
        if (config.hasPath(ConfigKeyName.HBASE_ZNODEPARENT)) {
            znodeParent = config.getString(ConfigKeyName.HBASE_ZNODEPARENT);
        }
        if (config.hasPath(ConfigKeyName.HBASE_CONF_PATH)) {
            confPath = config.getString(ConfigKeyName.HBASE_CONF_PATH);
        }
        // BUGFIX: the original guarded on HBASE_ROWKEY but read HBASE_GET_DATA, which
        // threw ConfigException.Missing whenever only HBASE_ROWKEY was set. Read
        // whichever key is actually present, preferring HBASE_GET_DATA.
        if (config.hasPath(ConfigKeyName.HBASE_GET_DATA)) {
            rowData = config.getString(ConfigKeyName.HBASE_GET_DATA);
        } else if (config.hasPath(ConfigKeyName.HBASE_ROWKEY)) {
            rowData = config.getString(ConfigKeyName.HBASE_ROWKEY);
        }
        if (config.hasPath(ConfigKeyName.HBASE_TABLE_NAME)) {
            table = config.getString(ConfigKeyName.HBASE_TABLE_NAME);
        }
        isKerberos = config.getBoolean(ConfigKeyName.HBASE_IS_KERBEROS);
        if (isKerberos) {
            masterPrincipal = config.getString(ConfigKeyName.HBASE_MASTERP_RINCIPAL);
            regionserverPrincipal = config.getString(ConfigKeyName.HBASE_REGIONSERVER_PRINCIPAL);
            jaasFile = config.getString(ConfigKeyName.HBASE_JAAS_FILE);
            krbPath = config.getString(ConfigKeyName.HBASE_KRB_PATH);
            keytab = config.getString(ConfigKeyName.HBASE_KEYTAB);
            principal = config.getString(ConfigKeyName.HBASE_USER_PRINCIPAL);
        }
        if (config.hasPath(ConfigKeyName.HBASE_SCAN_CACHE_SIZE)) {
            cacheSize = config.getInt(ConfigKeyName.HBASE_SCAN_CACHE_SIZE);
        } else {
            cacheSize = DEFAULT_CACHE_SIZE;
        }
        if (config.hasPath(ConfigKeyName.HBASE_SCAN_START_ROW)) {
            startRow = config.getString(ConfigKeyName.HBASE_SCAN_START_ROW);
        }
        if (config.hasPath(ConfigKeyName.HBASE_SCAN_STOP_ROW)) {
            stopRow = config.getString(ConfigKeyName.HBASE_SCAN_STOP_ROW);
        }
        if (config.hasPath(ConfigKeyName.HBASE_GET_DATA_METHOD)) {
            method = config.getString(ConfigKeyName.HBASE_GET_DATA_METHOD);
        } else {
            method = "scan";
        }
    }

    @Override
    public Config getConfig() {
        return config;
    }

    @Override
    public void setConfig(Config config) {
        this.config = config;
    }

    /**
     * Initializes the Kerberos environment and logs in from the configured keytab.
     *
     * @param conf Hadoop configuration already populated with kerberos auth settings
     * @throws IllegalStateException if the keytab login fails — proceeding without a
     *         login would only fail later with a much less obvious error
     */
    private void initKerberosENV(Configuration conf) {
        System.setProperty("java.security.krb5.conf", krbPath);
        System.setProperty("javax.security.auth.useSubjectCredsOnly", "false");
        // KDC debug output; helpful when diagnosing ticket/keytab issues.
        System.setProperty("sun.security.krb5.debug", "true");
        System.setProperty("java.security.auth.login.config", jaasFile);
        try {
            UserGroupInformation.setConfiguration(conf);
            UserGroupInformation.loginUserFromKeytab(principal, keytab);
            logger.info("current user: {}", UserGroupInformation.getCurrentUser());
            logger.info("login user: {}", UserGroupInformation.getLoginUser());
        } catch (IOException e) {
            logger.error("Kerberos login failed for principal {} with keytab {}", principal, keytab, e);
            throw new IllegalStateException("Kerberos login failed for principal " + principal, e);
        }
    }

    /**
     * Builds a Hadoop {@link Configuration} for a Kerberos-secured HBase cluster and
     * performs the keytab login.
     *
     * @return the populated configuration
     */
    private Configuration getHbaseKerberosEnv() {
        Configuration configuration = new Configuration();
        configuration.addResource(new Path(confPath + File.separator + "core-site.xml"));
        configuration.addResource(new Path(confPath + File.separator + "hdfs-site.xml"));
        configuration.addResource(new Path(confPath + File.separator + "hbase-site.xml"));
        // Optional keys: Configuration.set rejects null values, so only override when present.
        if (zookeeperQuorum != null) {
            configuration.set("hbase.zookeeper.quorum", zookeeperQuorum);
        }
        if (znodeParent != null) {
            configuration.set("zookeeper.znode.parent", znodeParent);
        }
        configuration.set("hbase.master.kerberos.principal", masterPrincipal);
        configuration.set("hbase.regionserver.kerberos.principal", regionserverPrincipal);
        configuration.set("hbase.security.authentication", "kerberos");
        configuration.set("hadoop.security.authentication", "kerberos");
        // Establish the KDC login before any HBase connection is attempted.
        initKerberosENV(configuration);
        return configuration;
    }

    /**
     * Builds a Hadoop {@link Configuration} for an unsecured HBase cluster.
     *
     * @return the populated configuration
     */
    private Configuration getHbaseEnv() {
        Configuration configuration = new Configuration();
        configuration.addResource(new Path(confPath + File.separator + "core-site.xml"));
        configuration.addResource(new Path(confPath + File.separator + "hdfs-site.xml"));
        configuration.addResource(new Path(confPath + File.separator + "hbase-site.xml"));
        // Optional keys: Configuration.set rejects null values, so only override when present
        // (the original would throw IllegalArgumentException when these were unset).
        if (zookeeperQuorum != null) {
            configuration.set("hbase.zookeeper.quorum", zookeeperQuorum);
        }
        if (znodeParent != null) {
            configuration.set("zookeeper.znode.parent", znodeParent);
        }
        return configuration;
    }

    /** Wraps a raw row key into an HBase {@link Get}, for use with {@code bulkGet}. */
    public static class GetFunction implements Function<byte[], Get> {

        private static final long serialVersionUID = 1L;

        public Get call(byte[] v) throws Exception {
            return new Get(v);
        }
    }

    /**
     * Converts a {@link Result} into a JSON string: {@code "rowkey"} plus one entry
     * per cell keyed by the column qualifier.
     */
    public static class ResultFunction implements Function<Result, String> {

        private static final long serialVersionUID = 1L;

        public String call(Result result) throws Exception {
            JSONObject jsonObject = new JSONObject();
            jsonObject.put("rowkey", Bytes.toString(result.getRow()));
            // listCells() returns null for an empty Result (e.g. a bulkGet miss);
            // the original iterated unconditionally and would NPE on such rows.
            List<Cell> cells = result.listCells();
            if (cells != null) {
                for (Cell cell : cells) {
                    jsonObject.put(new String(CellUtil.cloneQualifier(cell), StandardCharsets.UTF_8),
                            new String(CellUtil.cloneValue(cell), StandardCharsets.UTF_8));
                }
            }
            return jsonObject.toJSONString();
        }
    }

    /** Converts a scan tuple into the same JSON shape as {@link ResultFunction}. */
    private static class ScanConvertFunction implements Function<Tuple2<ImmutableBytesWritable, Result>, String> {

        private static final long serialVersionUID = 1L;

        public String call(Tuple2<ImmutableBytesWritable, Result> v1) throws Exception {
            JSONObject jsonObject = new JSONObject();
            Result result = v1._2();
            // Rowkey is a property of the row, not of each cell — put it once
            // (the original re-put it on every loop iteration).
            jsonObject.put("rowkey", Bytes.toString(result.getRow()));
            List<Cell> cells = result.listCells();
            if (cells != null) {
                for (Cell cell : cells) {
                    jsonObject.put(new String(CellUtil.cloneQualifier(cell), StandardCharsets.UTF_8),
                            new String(CellUtil.cloneValue(cell), StandardCharsets.UTF_8));
                }
            }
            return jsonObject.toJSONString();
        }
    }

    /**
     * Reads the table either with a full/range scan or with point gets.
     *
     * @param method       {@code "scan"} or {@code "get"}
     * @param hBaseContext HBase-on-Spark context
     * @param tableName    table to read
     * @return RDD of JSON strings (one per row), or {@code null} for an unknown method
     */
    public static JavaRDD<String> getData(String method, JavaHBaseContext hBaseContext, TableName tableName) {
        // Constant-first equals is null-safe should method ever be unset.
        if ("scan".equals(method)) {
            Scan scan = new Scan();
            // BUGFIX: the original only called setCaching(10000) when cacheSize == 0,
            // which never happened (prepare defaults it to 10000) — the configured
            // cache size was silently ignored. Always apply it.
            scan.setCaching(cacheSize > 0 ? cacheSize : DEFAULT_CACHE_SIZE);
            if (!StringUtils.isBlank(startRow)) {
                scan.setStartRow(Bytes.toBytes(startRow));
            }
            if (!StringUtils.isBlank(stopRow)) {
                scan.setStopRow(Bytes.toBytes(stopRow));
            }
            JavaRDD<Tuple2<ImmutableBytesWritable, Result>> javaRDDScan = hBaseContext.hbaseRDD(tableName, scan);
            return javaRDDScan.map(new ScanConvertFunction());
        } else if ("get".equals(method)) {
            // rowData is a comma-separated list of row keys to fetch.
            List<byte[]> list = new ArrayList<>();
            for (String key : rowData.split(",")) {
                list.add(Bytes.toBytes(key));
            }
            JavaRDD<byte[]> rdd = jsc.parallelize(list);
            return hBaseContext.bulkGet(tableName, DEFAULT_CACHE_SIZE, rdd, new GetFunction(), new ResultFunction());
        }
        return null;
    }

    /**
     * Maps JSON row strings onto the target schema, producing one string column per
     * schema field. Missing fields become {@code ""}.
     *
     * @param javaRDD     rows as JSON strings
     * @param environment provides the target {@code StructType}
     * @return RDD of schema-aligned Rows
     */
    private JavaRDD<Row> changToRow(JavaRDD<String> javaRDD, SparkEnvironment environment) {
        StructField[] structFields = environment.getStructType().fields();
        return javaRDD.map(new Function<String, Row>() {
            @Override
            public Row call(String s) throws Exception {
                List<String> list = new ArrayList<>();
                JSONObject jsonObject = JSONObject.parseObject(s);
                for (StructField field : structFields) {
                    // A key may be present with a JSON null value; guard against NPE
                    // on toString() (the original only checked containsKey).
                    Object value = jsonObject.get(field.name());
                    list.add(value == null ? "" : value.toString());
                }
                return RowFactory.create(list.toArray(new String[0]));
            }
        });
    }
}
