package com.kingsoft.dc.khaos.module.spark.preprocess.specific

import java.util.Properties

import com.kingsoft.dc.khaos.KhaosContext
import com.kingsoft.dc.khaos.innertype.Schema
import com.kingsoft.dc.khaos.metadata.Dependency
import com.kingsoft.dc.khaos.module.spark.constants.{KafkaConstants, SchedulerConstants}
import com.kingsoft.dc.khaos.module.spark.preprocess.transform.TransformStrategy
import com.kingsoft.dc.khaos.module.spark.util.MppUtils
import com.kingsoft.dc.khaos.util.Logging
import org.apache.spark.sql.DataFrame

import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import com.alibaba.druid.filter.config.ConfigTools

/**
 * create by yansu on 2020/07/09 11:20
 *
 * Persists Kafka partition/offset checkpoints into a MySQL table so that a
 * later run can resume consumption. Connection settings are read from the
 * "module.kafka.source.*" configuration prefix, falling back to built-in
 * defaults when the configuration is absent.
 */
class ManageOffset extends TransformStrategy with Logging {

  // Fallback connection settings; overwritten by loadProperties() when the
  // "module.kafka.source.*" configuration is available.
  // NOTE(review): credentials are hard-coded as fallback defaults — consider
  // removing them and failing fast when configuration is missing.
  private var _host = "mysql-share.internal-bigdata.com"
  private var _port = "13306"
  private var _username = "di"
  private var _password = "Kingsoft.com123"
  private var _dbname = "di_manage"
  private var _tblname = "di_offset_test"
  private var _jdbc_driver = ""
  private var _jdbc_url_param = ""

  /**
   * Reads the "partitionAndOffset" map from the values channel and upserts
   * each (partition, offset) pair into the environment-specific offset table.
   *
   * @param kc          runtime context carrying configuration and the values channel
   * @param module_id   id of the executing module (unused here)
   * @param config      module configuration string (unused here)
   * @param dependences upstream dependencies (unused here)
   * @param targets     downstream targets (unused here)
   */
  override def exec(kc: KhaosContext,
                    module_id: String,
                    config: String,
                    dependences: Seq[Dependency],
                    targets: Seq[Dependency]) = {
    val partitionAndOffset = kc._valuesChannel.getValues[mutable.HashMap[String, String]]("partitionAndOffset")
    loadProperties(kc)
    // The target table is decided by the run environment, overriding any
    // configured table name.
    val tblName = kc.conf.getString(SchedulerConstants.RUN_ENV).toLowerCase match {
      case "test" => "di_offset_test"
      case _      => "di_offset_online"
    }
    val publicKey = "MFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAKUJNn3fAkJ7FEtgAnwh8IZaV5VOEsP/V/tA2InE4aUtHkzTZKPUxa+kPkcPByx69bhzLrsTRg7t/Om7OcLEiwsCAwEAAQ=="
    // The configured password is stored encrypted; decrypt it with the Druid
    // config tools before use.
    val decryptPassword = ConfigTools.decrypt(publicKey, _password)
    // _jdbc_url_param lets configuration append connection options (e.g. for
    // MySQL 8 support).
    val url = s"jdbc:mysql://${_host}:${_port}/${_dbname}${_jdbc_url_param}"
    // NOTE(review): _jdbc_driver is loaded from configuration but never used
    // here; presumably MppUtils resolves the driver itself — confirm.
    // Escape single quotes so the generated SQL stays well-formed;
    // MppUtils.executeBatchs accepts only raw SQL strings, so a fully
    // parameterized statement is not possible at this call site.
    def quote(v: String): String = v.replace("'", "''")
    val sqlList = partitionAndOffset.map { case (partitions, offset) =>
      s"INSERT INTO $tblName " +
        s"(partitions,offset) " +
        s"VALUES ('${quote(partitions)}','${quote(offset)}') " +
        s"ON DUPLICATE KEY UPDATE " +
        s"partitions='${quote(partitions)}',offset='${quote(offset)}'"
    }.toList
    sqlList.foreach(sql => log.info("offset sql => " + sql))
    MppUtils.executeBatchs(url, _username, decryptPassword, sqlList)
    // This module produces no DataFrame output; register an empty result so
    // the framework sees the module as completed.
    val df: DataFrame = null
    addResult(Dependency(), df)
  }

  /**
   * Loads MySQL connection settings from the "module.kafka.source.*"
   * configuration prefix, keeping the built-in defaults for any missing key.
   * Failures are logged and the defaults remain in effect.
   */
  def loadProperties(kc: KhaosContext): Unit = {
    try {
      val kafkaProperties: Map[String, String] = kc.conf.getAllWithPrefix("module.kafka.source.").toMap
      kafkaProperties.foreach { case (k, v) => log.info(k + "   " + v) }
      _host = kafkaProperties.getOrElse(KafkaConstants.MODULE_KAFKA_SOURCE_MYSQL_HOST, "mysql-share.internal-bigdata.com")
      _port = kafkaProperties.getOrElse(KafkaConstants.MODULE_KAFKA_SOURCE_MYSQL_PORT, "13306")
      _username = kafkaProperties.getOrElse(KafkaConstants.MODULE_KAFKA_SOURCE_MYSQL_USERNAME, "di")
      _password = kafkaProperties.getOrElse(KafkaConstants.MODULE_KAFKA_SOURCE_MYSQL_PASSWORD, "Kingsoft.com123")
      _dbname = kafkaProperties.getOrElse(KafkaConstants.MODULE_KAFKA_SOURCE_MYSQL_DBNAME, "di_manage")
      _tblname = kafkaProperties.getOrElse(KafkaConstants.MODULE_KAFKA_SOURCE_MYSQL_TBLNAME, "di_offset_test")
      _jdbc_driver = kafkaProperties.getOrElse(KafkaConstants.MODULE_KAFKA_SOURCE_MYSQL_JDBC_DRIVER, "com.mysql.cj.jdbc.Driver") // for support mysql8
      _jdbc_url_param = kafkaProperties.getOrElse(KafkaConstants.MODULE_KAFKA_SOURCE_MYSQL_JDBC_URL_PARAM, "")
    } catch {
      case e: Exception =>
        // Keep the defaults, but record why configuration loading failed
        // instead of silently discarding the exception.
        log.error("未读取到MPP配置! 改用默认配置: " + e.getMessage)
    }
  }

  /**
   * This module emits no columns, so the schema is empty.
   */
  override def schema(kc: KhaosContext,
                      module_id: String,
                      config: String,
                      dependences: Seq[Dependency]): Any = {
    new Schema(Nil)
  }
}
