package org.apache.spark


import java.util.{Map => JMap}

import org.apache.spark.internal.config._

import scala.collection.JavaConverters._

/**
 * Central registry of Hourglass/LSql configuration entries.
 *
 * Entries created through the private `hourglassConf` helper are recorded in an
 * internal registry at creation time and can later be enumerated (see
 * [[getAllDefaults]]). Entries created with a plain [[ConfigBuilder]] are NOT
 * registered and therefore do not appear in [[entries]].
 *
 * @author leo.jie (weixiao.me@aliyun.com)
 * @version 1.0
 * @since 1.0
 */
object LSQLConf {

  // Backing store for every auto-registered entry, keyed by its config key.
  private[this] val registry = new java.util.HashMap[String, ConfigEntry[_]]()

  /**
   * Records a configuration entry in the registry.
   * Fails fast (IllegalArgumentException via require) on a duplicate key.
   */
  def register(entry: ConfigEntry[_]): Unit = {
    require(!registry.containsKey(entry.key),
      s"重复的配置 ${entry.key} 已经被注册！")
    registry.put(entry.key, entry)
  }

  /** All registered entries. NOTE: this is the live internal map, not a copy. */
  def entries: java.util.HashMap[String, ConfigEntry[_]] = registry

  /** Builds a config entry that registers itself with this object on creation. */
  private[this] def hourglassConf(key: String): ConfigBuilder =
    ConfigBuilder(key).onCreate(register)

  val L_SQL_NAME: ConfigEntry[String] =
    hourglassConf("hourglass.name")
      .doc("显示在SparkUI上的Application名称")
      .stringConf
      .createWithDefault("HourglassLSql")

  val L_SQL_ZK: ConfigEntry[String] =
    hourglassConf("hourglass.zkAddress")
      .doc("zookeeper地址")
      .stringConf
      .createWithDefault("localhost:2181")

  val L_AKKA_ENGINE_PORT: ConfigEntry[Int] =
    hourglassConf("hourglass.engine.port")
      .doc("akka通讯端口")
      .intConf
      .createWithDefault(2550)

  val L_AKKA_LOG_LEVEL: ConfigEntry[String] =
    hourglassConf("hourglass.akka.logLevel")
      .doc("akka日志调试级别")
      .stringConf
      .createWithDefault("info")

  // NOTE(review): the entries below use a plain ConfigBuilder, so they are never
  // registered and are invisible to entries/getAllDefaults — even though the two
  // "hourglass."-prefixed ones look like they belong with the registered group.
  // Confirm whether this asymmetry is intentional.
  val L_SQL_MASTER: OptionalConfigEntry[String] =
    ConfigBuilder("hourglass.master")
      .doc("与spark任务提交的模式一样")
      .stringConf
      .createOptional

  val L_SQL_ENGINE_TAG: ConfigEntry[String] =
    ConfigBuilder("hourglass.engine.tag")
      .doc("引擎执行时指定的tag")
      .stringConf
      .createWithDefault("default")

  val L_SQL_ENABLE_HIVE_SUPPORT: ConfigEntry[Boolean] =
    hourglassConf("hourglass.enableHiveSupport")
      .doc(
        """
          |是否开启hive支持
        """.stripMargin)
      .booleanConf
      .createWithDefault(false)

  val INIT_HIVE_CATALOG: ConfigEntry[Boolean] =
    ConfigBuilder("spark.hourglass.hiveCatalog.init.enable")
      .doc("是否获取hive元数据显示在web端")
      .booleanConf
      .createWithDefault(false)

  val HIVE_CATALOG_AUTO_COMPLETE: ConfigEntry[Boolean] =
    ConfigBuilder("spark.hourglass.hiveCatalog.autoComplete")
      .doc("SQL编辑框是否获取hive元数据以自动补全")
      .booleanConf
      .createWithDefault(false)

  val HOURGLASS_AUTH_ENABLE: ConfigEntry[Boolean] =
    ConfigBuilder("spark.hourglass.auth.enable")
      .doc("是否开启权限验证")
      .booleanConf
      .createWithDefault(false)

  val HOURGLASS_PARALLELISM: ConfigEntry[Int] =
    ConfigBuilder("spark.hourglass.parallelism")
      .doc("engine并行度")
      .intConf
      .createWithDefault(3)

  val MAIL_ENABLE: ConfigEntry[Boolean] =
    ConfigBuilder("spark.mail.enable")
      .doc("是否启用邮件通知")
      .booleanConf
      .createWithDefault(false)

  val STREAM_JOB_MAX_ATTEMPTS: ConfigEntry[Int] =
    ConfigBuilder("spark.streamJob.MaxAttempts")
      .doc("实时任务失败重启次数")
      .intConf
      .createWithDefault(3)

  /**
   * Immutable snapshot of every registered entry, mapped to its
   * default-value string representation.
   */
  def getAllDefaults: Map[String, String] =
    entries.asScala.map { case (key, entry) => key -> entry.defaultValueString }.toMap

  /** Builds a [[ConfigReader]] backed by the given settings map. */
  def createConfigReader(settings: JMap[String, String]): ConfigReader =
    new ConfigReader(new LSQLConfigProvider(settings))
}

/**
 * [[ConfigProvider]] that serves values from the supplied settings map, but
 * only for keys under the "hourglass." namespace; every other key resolves
 * to None. Misses in the map fall back to Spark's deprecated-config lookup.
 */
private[spark] class LSQLConfigProvider(conf: JMap[String, String]) extends ConfigProvider {

  override def get(key: String): Option[String] =
    if (!key.startsWith("hourglass.")) {
      None
    } else {
      // Option(...) maps a null from the Java map to None before the fallback.
      Option(conf.get(key)).orElse(SparkConf.getDeprecatedConfig(key, conf))
    }

}
