package com.kingsoft.dc.khaos.module.spark.source

import com.kingsoft.dc.khaos.KhaosContext
import com.kingsoft.dc.khaos.extender.meta.model.ds.ESConnect
import com.kingsoft.dc.khaos.metadata.{Dependency, KhaosStructField}
import com.kingsoft.dc.khaos.module.spark.metadata.source.EsSourceConfig
import com.kingsoft.dc.khaos.module.spark.model.MetaDataEntity
import com.kingsoft.dc.khaos.module.spark.util.{ESUserProvider, FileUtils, MetaUtils}
import com.kingsoft.dc.khaos.util.Logging
import org.apache.spark.SparkFiles
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types.{DataType, StructType}
import org.elasticsearch.hadoop.cfg.ConfigurationOptions
import org.elasticsearch.spark.sql.EsSparkSQL
import org.json4s.DefaultFormats
import org.json4s.jackson.JsonMethods
import org.json4s.jackson.JsonMethods.{compact, render}

import scala.collection.mutable

/**
 * describe: Reads data from Elasticsearch and returns a DataFrame. es-hadoop cannot handle an
 * index that mixes epoch-millis timestamps and formatted date strings; when reading times with
 * millisecond precision, `es.mapping.date.rich = false` must be enabled so Spark reads them as
 * strings, which are then cast to timestamp in this class. es-hadoop maps ES `object` fields to
 * `struct`, does not support the `range` type, and offers poor support for other uncommon types.
 * See: https://github.com/elastic/elasticsearch-hadoop
 *
 * author: liuzehui
 * create time: 2022-02-25 11:04
 */
class ESSource extends SourceStrategy with Logging {

  /**
   * Base reader options applied before any per-job settings: predicate pushdown
   * is enabled by default, and rich date mapping is disabled so ES dates arrive
   * as plain strings (cast to timestamp later — see class comment).
   */
  private val options = scala.collection.mutable.Map[String, String](
    "es.nodes.wan.only" -> "true",
    "pushdown" -> "true", // push filters down into the ES query
    "es.http.retries" -> "5",
    "es.mapping.date.rich" -> "false" // read dates as strings, not rich date objects
  )
  // Runtime state populated by source() / buildOptions() before use.
  private var _kc: KhaosContext = null
  private var esConnect: ESConnect = null
  // Kerberos identities; only written by the (currently disabled) kerberos path.
  private var spnegoPrincipal = ""
  private var principal = ""
  private var keytab = ""

  /**
   * Entry point: authenticates, builds the reader options and extracts the
   * configured ES index into a DataFrame.
   *
   * @param kc         Khaos runtime context (provides the SparkSession)
   * @param module_id  pipeline module id (unused in this implementation)
   * @param config     JSON-encoded [[EsSourceConfig]]
   * @param dependence upstream dependency info (unused in this implementation)
   * @return extracted data with ES types adapted for Spark (see handleDataType)
   */
  override def source(kc: KhaosContext, module_id: String, config: String, dependence: Dependency): DataFrame = {
    implicit val formats: DefaultFormats.type = DefaultFormats
    _kc = kc
    val esSourceConfig = JsonMethods.parse(config, useBigDecimalForDouble = true).extract[EsSourceConfig]
    log.info(s"get config => $config")
    log.info(s"get esSourceConfig => $esSourceConfig")


    // Authentication + metadata lookup; must happen before options are built.
    val esMeta: MetaDataEntity = init(esSourceConfig)
    log.info("通过鉴权并获得元数据信息")

    buildOptions(esSourceConfig, esMeta)
    log.info(s"get config options is $options")

    // Read the raw frame; no try-catch here on purpose — errors are left for
    // the es-hadoop library to throw.
    val originalDF = EsSparkSQL.esDF(_kc.sparkSession,
      s"${esSourceConfig.db_name}/${esSourceConfig.table_name}",
      options.toMap)

    // Adapt ES data types (e.g. object -> JSON string) to Spark-friendly ones.
    handleDataType(originalDF, esSourceConfig, options)
  }

  /**
   * Verifies read authorization and then fetches the ES client metadata for
   * the configured database/table.
   *
   * @param esSourceConfig parsed source configuration
   * @return metadata entity describing the ES connection
   */
  def init(esSourceConfig: EsSourceConfig): MetaDataEntity = {
    val dbName = esSourceConfig.db_name
    val tableName = esSourceConfig.table_name
    val extender = esSourceConfig.extender

    // Authorization check must pass before any metadata or data is touched.
    MetaUtils.checkReadAuth(
      _kc,
      dbName,
      tableName,
      extender.auth.clazz,
      compact(render(extender.auth.params)))

    log.info("begin getting es meta info ---")

    // Resolve connection metadata (nodes, credentials, SSL material, ...).
    MetaUtils.getESMeta(
      _kc,
      dbName,
      tableName,
      extender.meta.clazz,
      compact(render(extender.meta.params)))
  }

  /**
   * Populates the shared `options` map from the source config and the
   * metadata entity: node addresses, optional query filter, optional basic
   * auth, and connection-type specific settings.
   *
   * @param esSourceConfig parsed source configuration
   * @param esMeta         metadata entity carrying the ES connect info
   */
  def buildOptions(esSourceConfig: EsSourceConfig, esMeta: MetaDataEntity): Unit = {
    esConnect = esMeta.getDsEsConnect

    log.info(s"esConnect param: ${esConnect.toString}")

    // Node and port settings derived from the configured HTTP URLs.
    options ++= getNodesAndPort(esConnect.getHttpUrls)

    // Optional ES query filter; pushdown is already enabled by default.
    esSourceConfig.filter match {
      case f if f.nonEmpty => options += ("es.query" -> f)
      case _ => // no filter configured
    }

    // Basic-auth credentials are optional: set them only when both exist.
    val user = esConnect.getUsername
    val pass = esConnect.getPassword
    if (user != null && pass != null) {
      options += (
        "es.net.http.auth.user" -> user,
        "es.net.http.auth.pass" -> pass
      )
    }

    delConnectType()
  }

  /**
   * Applies connection-type specific options: nothing for plain HTTP, SSL
   * store material for HTTPS. Kerberos ("krbsType") is not supported yet and
   * any unknown type is rejected.
   *
   * Fixes: declares the `Unit` return type explicitly (side-effecting method),
   * and null-guards `getKeyStoreType` — the old `isEmpty` call NPE'd when the
   * type was null, unlike the username/password handling which checks null.
   *
   * @throws UnsupportedOperationException for the kerberos connect type
   * @throws Exception for an unknown connect type
   */
  def delConnectType(): Unit = {
    log.info("es_ConnectType==>" + esConnect.getConnectType)
    esConnect.getConnectType match {
      case "http" =>
      // Plain HTTP needs no additional authentication options.

      case "https" =>
        // Decode the base64-encoded stores into the Spark app's temp dir.
        val (keyStorePath, trustStorePath) = getSSLFile(esConnect)

        // Default to JKS when the keystore type is null or empty.
        val keyStoreType = Option(esConnect.getKeyStoreType).filter(_.nonEmpty).getOrElse("JKS")

        options += (
          "es.net.ssl" -> "true",

          "es.net.ssl.keystore.type" -> keyStoreType,
          "es.net.ssl.keystore.pass" -> esConnect.getKeyStorePass,
          "es.net.ssl.keystore.location" -> s"file://$keyStorePath",

          "es.net.ssl.truststore.pass" -> esConnect.getTrustStorePass,
          "es.net.ssl.truststore.location" -> s"file://$trustStorePath"
        )

      case "krbsType" =>
        // Kerberos bootstrap exists in initKerberosConf() but the keytab
        // distribution via SPARK_YARN_STAGING_DIR was never enabled.
        throw new UnsupportedOperationException("essource暂时不支持kerberos认证")

      case _ =>
        log.error("elasticsearch 未知连接方式!")
        throw new Exception("elasticsearch 未知连接方式!")
    }
  }

  /**
   * Configures JVM-wide Kerberos/SPNEGO settings plus the es-hadoop options
   * required for a kerberized cluster. Currently unreachable: the "krbsType"
   * branch in delConnectType() throws before this can be called.
   *
   * NOTE(review): mutates global System properties and assumes
   * `esConnect.getPrincipal` has the form "name/host[@REALM]" (split on "/")
   * and that `keytab` was shipped via sparkContext.addFile — confirm both
   * before re-enabling this path.
   */
  def initKerberosConf(): Unit = {
    spnegoPrincipal = "HTTP/" + esConnect.getPrincipal.split("/")(1)
    log.info("spnegoPrincipal=>" + spnegoPrincipal)
    val krb5Conf = _kc.conf.getString("proxy.krb5.conf")
    log.info("krb5Conf=>" + krb5Conf)
    // Resolve the keytab previously distributed with sparkContext.addFile.
    val keytabPath = SparkFiles.get(keytab)
    log.info("keytabPath===>" + keytabPath)
    System.setProperty("sun.security.krb5.debug", "true")
    System.setProperty("sun.security.spnego.debug", "true")
    System.setProperty("es.security.indication", "true")
    System.setProperty("java.security.krb5.conf", krb5Conf)
    // Longer timeout/retries plus the custom user provider that performs the
    // keytab login on executors.
    options += (
      "es.http.timeout" -> "5m",
      "es.http.retries" -> "50",
      ConfigurationOptions.ES_SECURITY_AUTHENTICATION -> "kerberos",
      ConfigurationOptions.ES_NET_SPNEGO_AUTH_ELASTICSEARCH_PRINCIPAL -> spnegoPrincipal,
      ConfigurationOptions.ES_SECURITY_USER_PROVIDER_CLASS -> "com.kingsoft.dc.khaos.module.spark.util.ESUserProvider",
      ESUserProvider.SPARK_ES_PRINCIPAL -> principal,
      ESUserProvider.SPARK_ES_KEYTAB -> keytabPath,
      ESUserProvider.ES_NET_SPNEGO_AUTH_ELASTICSEARCH_PRINCIPAL -> spnegoPrincipal
    )
  }

  /**
   * Decodes the base64-encoded SSL key store and trust store into the Spark
   * task's local temp directory and returns their local paths.
   *
   * Fixes: fails with a clear message instead of an NPE when the
   * LOCAL_USER_DIRS environment variable is unset, and dedupes the
   * path-build/decode logic into a local helper.
   *
   * @param esConnect connection metadata carrying store contents and names
   * @return (keyStorePath, trustStorePath) on the local filesystem
   * @throws NoSuchElementException when LOCAL_USER_DIRS is not set
   */
  def getSSLFile(esConnect: ESConnect): (String, String) = {

    val localDirs = Option(System.getenv("LOCAL_USER_DIRS")).getOrElse(
      throw new NoSuchElementException("environment variable LOCAL_USER_DIRS is not set"))

    // YARN may provide several comma-separated local dirs; use the first.
    // (split on a string with no comma yields the whole string, as before.)
    val local = localDirs.split(",")(0)

    // Decode one base64 store into <local>/es_ssl_source/<original file name>.
    def materialize(base64Content: String, storePath: String): String = {
      val target = s"$local/es_ssl_source/${storePath.split("/").last}"
      FileUtils.decoderBase64File(base64Content, target)
      target
    }

    (materialize(esConnect.getKeyStoreFile, esConnect.getKeyStorePath),
      materialize(esConnect.getTrustStoreFile, esConnect.getTrustStorePath))
  }

  /**
   * Derives the `es.nodes` / `es.port` reader options from the configured
   * HTTP URLs. A comma-separated list is passed through verbatim (es-hadoop
   * parses full URLs); a single URL of the form `http(s)://host:port[/]` is
   * split into host and port.
   *
   * Fixes: splits the URL once instead of twice, and rejects null input with
   * the intended NoSuchElementException instead of an NPE.
   *
   * @param httpUrls one URL, or a comma-separated list of URLs
   * @return map with "es.nodes" (and "es.port" for the single-URL case)
   * @throws NoSuchElementException when no URL is configured
   */
  def getNodesAndPort(httpUrls: String): Map[String, String] = {
    if (httpUrls == null || httpUrls.isEmpty) {
      throw new NoSuchElementException("es.nodes is null")
    } else if (httpUrls.contains(",")) {
      // Multiple nodes: hand the full URL list to es-hadoop as-is.
      Map("es.nodes" -> httpUrls)
    } else {
      // "http://host:port/" splits on ':' into ("http", "//host", "port/").
      val parts = httpUrls.split(":")
      Map(
        "es.nodes" -> parts(1).substring(2),
        "es.port" -> parts(2).replace("/", "")
      )
    }
  }

  /**
   * Adapts ES-specific column types for Spark: ES `object` fields arrive as
   * Spark structs and are serialized to JSON strings; every other extracted
   * column is selected unchanged.
   *
   * @param df             raw frame read from ES
   * @param esSourceConfig source config listing the fields to extract
   * @param options        reader options (unused here; kept for interface compatibility)
   * @return frame containing only the configured columns, types adapted
   */
  def handleDataType(df: DataFrame, esSourceConfig: EsSourceConfig, options: mutable.Map[String, String]): DataFrame = {
    val projected = esSourceConfig.extract_fields.map { field =>
      if (field.data_type.equalsIgnoreCase("object"))
        // object -> struct in Spark; flatten it into a JSON string column.
        to_json(col(field.field)).as(field.field)
      else
        col(field.field)
    }
    df.select(projected: _*)
  }


  /**
   * Builds an explicit schema with redefined precision; intended to work
   * around float precision loss when Spark reads ES data. Currently unused.
   *
   * @param esSourceConfig source config listing fields and their data types
   * @return StructType assembled from a generated DDL string
   * @throws UnsupportedOperationException for an unmapped data type
   */
  @deprecated("重新定义精度,用于解决spark读取float类型时，精度损失的方法，暂时弃用")
  def buildSchema(esSourceConfig: EsSourceConfig): StructType = {

    // Source-type name -> Spark DDL type name.
    val ddlTypes = Map(
      "NUMBER" -> "long",
      "BINARY" -> "binary",
      "BOOLEAN" -> "boolean",
      "DATE" -> "timestamp",
      "DECIMAL" -> "double",
      "STRING" -> "string"
    )

    val ddl = esSourceConfig.extract_fields
      .map { field =>
        val sparkType = ddlTypes.getOrElse(
          field.data_type,
          throw new UnsupportedOperationException(s"不支持的数据类型${field.data_type}"))
        s"${field.field} $sparkType"
      }
      .mkString(",")

    DataType.fromDDL(ddl).asInstanceOf[StructType]
  }


  /**
   * Derives the output schema from the configured extract fields without
   * touching Elasticsearch.
   *
   * @param dc         Khaos runtime context (unused here)
   * @param config     JSON-encoded [[EsSourceConfig]]
   * @param dependence upstream dependency info (unused here)
   * @return one KhaosStructField per configured extract field
   */
  override def schema(dc: KhaosContext, config: String, dependence: Dependency): List[KhaosStructField] = {
    implicit val formats: DefaultFormats.type = DefaultFormats
    val esSourceConfig = JsonMethods.parse(config, useBigDecimalForDouble = true).extract[EsSourceConfig]
    esSourceConfig.extract_fields.map { field =>
      KhaosStructField(field.field, field.data_type)
    }
  }

}
