package com.kingsoft.dc.khaos.module.spark.source

import java.sql.{Connection, DriverManager, ResultSet, Statement}
import java.util.Properties

import scala.collection.mutable.ArrayBuffer
import scala.util.control.NonFatal

import com.alibaba.fastjson.JSON
import com.kingsoft.dc.khaos.KhaosContext
import com.kingsoft.dc.khaos.dsl.spark.udf.constants.UdfInfo
import com.kingsoft.dc.khaos.dsl.utils.UdfUtils
import com.kingsoft.dc.khaos.extender.meta.model.col.DmTableColumn
import com.kingsoft.dc.khaos.extender.meta.utils.RSAEncrypt
import com.kingsoft.dc.khaos.metadata.{Dependency, KhaosStructField}
import com.kingsoft.dc.khaos.module.spark.constants.HiveConstants
import com.kingsoft.dc.khaos.module.spark.metadata.source.HiveSourceConfig
import com.kingsoft.dc.khaos.module.spark.model.MetaDataEntity
import com.kingsoft.dc.khaos.module.spark.util.{DataTypeConvertUtils, FileUtils, HiveDataSourceUtil, HiveDialect, HiveUtils, MetaUtils, SparkJobHelper}
import com.kingsoft.dc.khaos.util.Logging
import org.apache.commons.lang3.StringUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.security.UserGroupInformation
import org.apache.hadoop.security.UserGroupInformation.setLoginUser
import org.apache.hadoop.security.authentication.util.KerberosName
import org.apache.spark.SparkFiles
import org.apache.spark.sql.jdbc.JdbcDialects
import org.apache.spark.sql.{Column, DataFrame, Row, SparkSession}
import org.json4s.DefaultFormats
import org.json4s.jackson.JsonMethods.{compact, parse, render}
import sun.security.krb5.Config

/**
 *
 * Created by goosoog on 2019/6/13.
 *
 */
class HiveSource extends SourceStrategy with Logging {
  // Table metadata resolved by the meta extender; populated in init().
  private var meta: MetaDataEntity = null
  // All "module."-prefixed configuration entries with the prefix stripped.
  private var hiveProperties: Map[String, String] = _

  // Kerberos credentials for an external Hive source; set by initKerberos().
  private var HIVE_PRINCIPAL: String = _
  private var HIVE_KEYTABPATH: String = _
  private var HIVE_KRB5PATH: String = _
  private var sparkSession: SparkSession = _

  /**
   * Runs the read-authorization check, resolves the Hive table metadata, and
   * caches the module properties and SparkSession for later calls.
   *
   * @param kc             job context
   * @param hiveSourceConf parsed source configuration for this module
   */
  def init(kc: KhaosContext, hiveSourceConf: HiveSourceConfig): Unit = {
    // Authorization check via the configured auth extender (result currently unused).
    val checkResult = MetaUtils.checkReadAuth(
      kc,
      hiveSourceConf.db_name,
      hiveSourceConf.table_name,
      hiveSourceConf.extender.auth.clazz,
      compact(render(hiveSourceConf.extender.auth.params)))

    // Fetch table metadata (columns, connection info, data-source flags).
    meta = MetaUtils.getHiveMeta(
      kc,
      hiveSourceConf.db_name,
      hiveSourceConf.table_name,
      hiveSourceConf.extender.meta.clazz,
      compact(render(hiveSourceConf.extender.meta.params)),
      this)
    hiveProperties = kc.conf.getAllWithUnPrefix("module.").toMap
    sparkSession = kc.sparkSession
  }

  /**
   * Parses the UDF registration descriptors from a JSON array and resolves
   * each UDF's absolute jar path under the job resource directory.
   *
   * @param functions    JSON array of UDF descriptors
   * @param resourcePath directory holding the UDF packages
   * @return the descriptors with jar paths filled in
   */
  def getUdfInfo(functions: String, resourcePath: String): List[UdfInfo] = {
    import scala.collection.JavaConverters._
    val udfList = JSON.parseArray(functions, classOf[UdfInfo]).asScala.toList
    for (udf <- udfList) {
      val fileName = udf.getPackageName
      udf.setJarAbsolutePath(
        resourcePath + "/" + fileName + "." + udf.getPackageType)
    }
    udfList
  }

  /**
   * Builds JDBC connection properties for the external Hive source.
   * The password is stored RSA-encrypted in the metadata and decrypted here.
   */
  def getHiveJdbcProps(): Properties = {
    val properties = new Properties()
    properties.setProperty("username", meta.getDsHiveConnect.getUserName)
    properties.setProperty("driverClassName", "org.apache.hive.jdbc.HiveDriver")
    properties.setProperty("password", RSAEncrypt.decryptByRSAPassWord(meta.getDsHiveConnect.getPassword))
    properties
  }

  /**
   * Builds the SELECT statement for the JDBC read: projects exactly the
   * columns declared in the metadata, with an optional WHERE clause.
   * NOTE(review): `filter` is interpolated verbatim into the SQL; it must
   * come from trusted job configuration only.
   *
   * @param dbName       database name
   * @param tableName    table name
   * @param filter       optional WHERE condition (blank means no filter)
   * @param columnMapArr (columnName, columnType) pairs from the metadata
   * @return the SELECT statement as a string
   */
  def getJdbcSql(dbName: String, tableName: String, filter: String, columnMapArr: ArrayBuffer[(String, String)]) = {
    val fields = columnMapArr.map { case (fieldName, _) => fieldName }
    val baseSql = s"select ${fields.mkString(",")} from $dbName.$tableName"
    if (StringUtils.isBlank(filter)) baseSql else s"$baseSql where $filter"
  }

  /**
   * Executes the query over a plain Hive JDBC connection (kerberized path)
   * and materializes the result set into a DataFrame.
   * All rows are collected on the driver before being parallelized, so this
   * is only suitable for result sets that fit into driver memory.
   *
   * @param url          hive2 JDBC url
   * @param dbName       database name
   * @param tableName    table name
   * @param filter       optional WHERE condition
   * @param columnMapArr (columnName, columnType) pairs from the metadata
   * @return a DataFrame with the schema derived from columnMapArr
   */
  def selectAndBuildDfWithKrb(url: String, dbName: String, tableName: String, filter: String, columnMapArr: ArrayBuffer[(String, String)]) = {
    val hiveProps: Properties = getHiveJdbcProps()
    var connection: Connection = null
    var statm: Statement = null
    var resultSet: ResultSet = null
    val sql = getJdbcSql(dbName, tableName, filter, columnMapArr)
    logInfo(s"jdbc sql=[$sql]")
    try {
      connection = DriverManager.getConnection(url, hiveProps)
      statm = connection.createStatement()
      resultSet = statm.executeQuery(sql)
      logInfo("查询成功!")
      val schema = SparkJobHelper.dynamicBuildDFSchema(columnMapArr)
      val rowList = new ArrayBuffer[Row]()
      while (resultSet.next()) {
        // Read every column as a string; type conversion against the schema
        // happens in DataTypeConvertUtils.convertDataLine.
        var index = 1
        val lineArr = new ArrayBuffer[String](schema.size)
        for ((fieldName, fieldType) <- columnMapArr) {
          lineArr.append(resultSet.getString(index))
          index = index + 1
        }
        rowList.append(DataTypeConvertUtils.convertDataLine(schema, lineArr.toArray))
      }
      val rowRDD = sparkSession.sparkContext.parallelize(rowList)
      sparkSession.createDataFrame(rowRDD, schema)
    } catch {
      // NonFatal (was Throwable): let OOM/interrupts propagate unwrapped.
      case NonFatal(e) => throw new Exception("select&build failed！", e)
    } finally {
      // Close in reverse acquisition order; the ResultSet was previously leaked.
      if (resultSet != null) {
        resultSet.close()
      }
      if (statm != null) {
        statm.close()
      }
      if (connection != null) {
        connection.close()
      }
    }
  }

  /**
   * Reads the configured Hive table into a DataFrame.
   *
   * Three paths: (1) default data source — plain Spark SQL on the cluster's
   * own metastore; (2) external kerberized Hive — manual JDBC read via
   * [[selectAndBuildDfWithKrb]]; (3) external non-kerberized Hive — Spark's
   * JDBC data source with the Hive dialect.
   */
  override def source(kc: KhaosContext,
                      module_id: String,
                      config: String,
                      dependence: Dependency): DataFrame = {
    implicit val formats = DefaultFormats
    val hiveSourceConf = parse(config, true).extract[HiveSourceConfig]
    init(kc, hiveSourceConf)
    val sparkSession: SparkSession = kc.sparkSession
    val dbName = hiveSourceConf.db_name
    val tableName = hiveSourceConf.table_name
    val filter = hiveSourceConf.filter

    // Register Hive UDFs shipped with the job, when configured.
    val functions = kc.conf.getString("functions", "")
    val resourcePath = kc.conf.getString("job.executing.dir", "")
    if (StringUtils.isNotBlank(functions) && StringUtils.isNotBlank(
      resourcePath)) {
      val udfList = getUdfInfo(functions, resourcePath)
      UdfUtils.registerHiveUdf(sparkSession, udfList)
    } else {
      logInfo(s"=>>>functions注册信息或者udf资源目录为空")
    }

    if (meta.getDefaultDs) {
      // Default data source: table lives in this cluster's metastore.
      val baseSql = s"select * from ${dbName}.${tableName}"
      val sql =
        if (filter != null && !filter.trim.equals("")) s"${baseSql} where ${filter}"
        else baseSql
      val data: DataFrame = sparkSession.sqlContext.sql(sql)
      data
    } else { // external data source, read over JDBC
      logInfo(s"=>>> jdbc read mode ...")

      // Column names and types as declared by the data-management metadata.
      val columnMapArr = new ArrayBuffer[(String, String)]()
      val columnArr = new ArrayBuffer[Column]()
      val columnInfoMetaList: java.util.List[DmTableColumn] = meta.getColumnEntiy
      for (i <- 0 until columnInfoMetaList.size) {
        columnMapArr += (columnInfoMetaList.get(i).getColName -> columnInfoMetaList.get(i).getColType)
      }

      if (meta.getDsHiveConnect.getUseKrbs.toBoolean) {
        // Kerberized external Hive; requires cross-realm trust between clusters.
        log.info("外部hive数据源 [开启kerberos]")
        val hProperties: Properties = getHiveServerProps(dbName)
        val url = HiveUtils.getJdbcUrl(meta.getDsHiveConnect.getPrincipal, hProperties)
        selectAndBuildDfWithKrb(url, dbName, tableName, filter, columnMapArr)
      } else {
        JdbcDialects.registerDialect(HiveDialect)
        val driver = "org.apache.hive.jdbc.HiveDriver"
        var url = ""
        if (meta.getDsHiveConnect.getConnectType != null && meta.getDsHiveConnect.getConnectType == "Cluster") {
          // Cluster (ZooKeeper discovery) mode is intentionally unsupported.
          throw new Exception("不支持外部hive集群模式！")
        } else { // connectType null or "Single"
          url = s"jdbc:hive2://${meta.getDsHiveConnect.getHost}:${meta.getDsHiveConnect.getPort}/${dbName}"
        }

        val table = s"${dbName}.${tableName}"
        val fetchsize = hiveProperties.getOrElse(HiveConstants.SOURCE_JDBC_FETCHSIZE, "1000").toString
        // Single-element predicate array: the whole filter is one partition slice.
        val predicates = Array[String](filter)
        var jdbcDF: DataFrame = null
        val props = new Properties
        props.put("driver", driver)
        props.put("user", meta.getDsHiveConnect.getUserName)
        props.put("password", RSAEncrypt.decryptByRSAPassWord(meta.getDsHiveConnect.getPassword))
        props.put("fetchsize", fetchsize)
        if (StringUtils.isNotBlank(filter)) {
          jdbcDF = sparkSession.read.jdbc(url, table, predicates, props)
        } else {
          jdbcDF = sparkSession.read.jdbc(url, table, props)
        }
        // Some Hive JDBC setups return field names qualified as "table.field";
        // normalize them back to the bare column names from the metadata.
        val isFullName = jdbcDF.schema.fieldNames.head.contains(".")
        for (col <- columnMapArr) {
          if (isFullName) {
            jdbcDF = jdbcDF.withColumn(col._1,
              jdbcDF.col(s"`$tableName." + col._1 + "`"))
          } else {
            jdbcDF = jdbcDF.withColumn(col._1, jdbcDF.col(col._1))
          }
          columnArr.append(jdbcDF.col(col._1))
        }
        jdbcDF.select(columnArr: _*)
      }
    }
  }

  /**
   * Declares this source's output schema from the configured extract_fields,
   * without touching Hive at all.
   */
  override def schema(kc: KhaosContext,
                      config: String,
                      dependence: Dependency): List[KhaosStructField] = {
    val fieldSchema = ArrayBuffer[KhaosStructField]()
    implicit val formats = DefaultFormats
    val info = parse(config, true).extract[HiveSourceConfig]
    val extrFields = info.extract_fields
    for (ef <- extrFields) {
      fieldSchema += KhaosStructField(ef.field, ef.data_type)
    }
    fieldSchema.toList
  }

  /**
   * Materializes the base64-encoded keytab/krb5 files from the metadata into
   * the YARN staging directory and registers them with the SparkContext so
   * executors can later fetch them through SparkFiles.
   *
   * @param module_id suffix keeping the staged file names unique when a
   *                  single job talks to several kerberized sources
   */
  def initKerberos(kc: KhaosContext, module_id: String = "") = {
    val connect = meta.getDsHiveConnect
    HIVE_PRINCIPAL = connect.getPrincipal
    // Base64-encoded keytab and krb5.conf contents supplied by data management.
    val keytabFile: String = connect.getKeytabFile
    val krb5File: String = connect.getKrb5File

    // module_id in the file name avoids clashes between modules of one job.
    val keytabPath
    : String = System.getenv("SPARK_YARN_STAGING_DIR") + "/hive_sink_" + module_id + ".keytab"
    val krb5Path
    : String = System.getenv("SPARK_YARN_STAGING_DIR") + "/krb5_" + module_id + ".conf"
    FileUtils.decoderBase64File(
      keytabFile,
      keytabPath,
      FileSystem.get(sparkSession.sparkContext.hadoopConfiguration))
    FileUtils.decoderBase64File(
      krb5File,
      krb5Path,
      FileSystem.get(sparkSession.sparkContext.hadoopConfiguration))
    HIVE_KEYTABPATH = keytabPath
    HIVE_KRB5PATH = krb5Path
    kc.conf.set("proxy.krb5.conf", "")

    log.info("==>user:" + HIVE_PRINCIPAL)
    log.info("==>keytab:" + HIVE_KEYTABPATH)
    log.info("==>krb5:" + HIVE_KRB5PATH)
    kc.sparkSession.sparkContext.addFile(HIVE_KRB5PATH)
    kc.sparkSession.sparkContext.addFile(HIVE_KEYTABPATH)
  }

  /**
   * Assembles the connection/pool properties handed to HiveUtils when the
   * kerberized JDBC url is built.
   * NOTE(review): `url` is never assembled from host/port here (that logic is
   * commented out), so "hive.jdbc.url" carries at most the ";principal=..."
   * suffix — HiveUtils.getJdbcUrl presumably rebuilds the full url from the
   * host/port/db entries below. Confirm before changing.
   */
  def getHiveServerProps(dbName: String) = {
    val props = new Properties()
    props.put("hive.driver", "org.apache.hive.jdbc.HiveDriver")
    var url = ""
    if (meta.getDsHiveConnect.getUseKrbs.toBoolean) { // kerberos enabled
      url = url + s";principal=${HIVE_PRINCIPAL}"
    }

    props.put("hive.jdbc.url", url)
    props.put("hive.jdbc.db", dbName)
    props.put("hive.jdbc.host", meta.getDsHiveConnect.getHost)
    props.put("hive.jdbc.port", meta.getDsHiveConnect.getPort)
    props.put("hive.jdbc.username", meta.getDsHiveConnect.getUserName)
    props.put(
      "hive.jdbc.password",
      RSAEncrypt.decryptByRSAPassWord(meta.getDsHiveConnect.getPassword))
    // Connection-pool sizing; each entry is overridable via module.* config.
    props.put(
      "hive.initialSize",
      hiveProperties.getOrElse(HiveConstants.HIVE_INITIALSIZE, "5")) // default 5
    props.put("hive.minIdle",
      hiveProperties.getOrElse(HiveConstants.HIVE_MINIDLE, "20")) // default 20
    props.put(
      "hive.maxActive",
      hiveProperties.getOrElse(HiveConstants.HIVE_MAXACTIVE, "500")) // default 500
    props.put(
      "hive.maxWait",
      hiveProperties.getOrElse(HiveConstants.HIVE_MAXWAIT, "60000")) // default 60000
    props.put(
      HiveConstants.SUBMIT_THREAD_POOL,
      hiveProperties.getOrElse(HiveConstants.SUBMIT_THREAD_POOL, "5")) // default 5
    props
  }

  /**
   * Performs the Kerberos login on an executor using the keytab/krb5 files
   * that initKerberos() distributed via SparkContext.addFile.
   */
  def authKerberosAccess(): Unit = {
    val keytabPath: String = SparkFiles.get(HIVE_KEYTABPATH.split("/").last)
    val krb5path: String = SparkFiles.get(HIVE_KRB5PATH.split("/").last)

    HiveUtils.loginKerberos(krb5path, keytabPath, HIVE_PRINCIPAL)
    log.info(s"==> hive sink user $HIVE_PRINCIPAL Login successful !")
  }

  /**
   * Keytab-based Kerberos login: refreshes the JVM krb5 configuration and
   * installs the resulting UGI as the process-wide Hadoop login user.
   *
   * @param krb5Path   local path to krb5.conf
   * @param keytabPath local path to the keytab file
   * @param principal  Kerberos principal to authenticate as
   */
  def loginKerberos(krb5Path: String,
                    keytabPath: String,
                    principal: String) {
    try {
      log.info(" =>kerberos Info krb5Path:{} keytabPath:{} principal:{}", krb5Path, keytabPath, principal)
      // Point the JVM at the new krb5.conf and force the cached config to reload.
      System.setProperty("java.security.krb5.conf", krb5Path)
      Config.refresh()
      // Drop any previously cached login before re-authenticating.
      setLoginUser(null)
      val conf: Configuration = new Configuration()
      conf.set("hadoop.security.authentication", "kerberos")
      conf.setBoolean("hadoop.security.authorization", true)
      conf.set("hive.server2.authentication.kerberos.principal", principal)
      UserGroupInformation.setConfiguration(conf)
      val ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, keytabPath)
      ugi.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS)
      setLoginUser(ugi)
      log.info("=> kerberos rules:{}", KerberosName.getRules())
    } catch {
      case e: Exception =>
        throw new Exception(e)
    }
  }
}
