package com.kingsoft.dc.khaos.module.spark.sink

import java.sql.Connection
import java.sql.DriverManager.getConnection
import java.util
import java.util.Properties

import com.kingsoft.dc.khaos.KhaosContext
import com.kingsoft.dc.khaos.extender.meta.model.col.DmTableColumn
import com.kingsoft.dc.khaos.innertype.Schema
import com.kingsoft.dc.khaos.module.spark.constants.{ColumnType, MetaDataConstants, MysqlConstants, SchedulerConstants}
import com.kingsoft.dc.khaos.module.spark.metadata.sink.{ExtractFieldInfo, MySQLConfig}
import com.kingsoft.dc.khaos.module.spark.model.center.metric.SyncProcessDataMetric
import com.kingsoft.dc.khaos.module.spark.model.{MetaDataEntity, RelationDataStatusInfo}
import com.kingsoft.dc.khaos.module.spark.util._
import com.kingsoft.dc.khaos.util.Logging
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types.StringType
import org.apache.spark.sql.{Column, DataFrame}
import org.json4s.DefaultFormats
import org.json4s.JsonAST.JObject
import org.json4s.jackson.JsonMethods.{compact, render}

import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer

class MysqlSinkDbs extends SinkStrategy with Logging {

  // Datasource metadata (host/port/credentials) resolved in init().
  private var mysqlMeta: MetaDataEntity = null
  // Parsed sink configuration, extracted from the module config JSON in init().
  private var mysqlConfig: MySQLConfig = null
  private var ip = ""
  private var port = ""
  private var dbName = ""
  private var jdbcUrl = ""
  // Extra JDBC URL parameters loaded from `module.mysql.sink.*` properties.
  private var _jdbc_url_param = ""

  // Set from config in sink(): whether to delete historical rows before writing,
  // and the mandatory WHERE filter used for that delete.
  private var ifDeleteOn = false
  private var filter = ""

  /**
   * Writes the incoming DataFrame to a MySQL table.
   *
   * Steps: parse config and resolve datasource metadata, rename/default-fill the
   * columns, cast them to the target column types, optionally delete historical
   * rows matching the configured filter, then write via the Spark JDBC writer.
   *
   * @param kc        Khaos runtime context (configuration, Spark session)
   * @param module_id id of this pipeline module (unused here)
   * @param config    module configuration JSON, extracted into [[MySQLConfig]]
   * @param schema    declared schema of the incoming data (unused here)
   * @param dataFrame data to write
   * @return this sink, for chaining
   */
  override def sink(kc: KhaosContext,
                    module_id: String,
                    config: JObject,
                    schema: Schema,
                    dataFrame: DataFrame): this.type = {
    init(kc, config)

    val connect = mysqlMeta.dsMysqlConnect
    ip = connect.getHost
    port = connect.getPort
    val userName = connect.getUserName
    val passWord = connect.getPassWord
    dbName = mysqlConfig.db_name
    val tblName = mysqlConfig.table_name
    val writeType = mysqlConfig.write_option.toLowerCase
    val extractFields = mysqlConfig.extract_fields
    ifDeleteOn = mysqlConfig.IfDeleteOn

    // Rename + default-fill first, then cast every column to its target type.
    val resultDF = convertDataType(extractFields, setDefaultValue(extractFields, dataFrame))

    jdbcUrl = s"jdbc:mysql://${ip}:${port}/$dbName${_jdbc_url_param}"
    val prop = new Properties
    prop.put("driver", "com.mysql.jdbc.Driver")
    prop.put("user", userName)
    prop.put("password", passWord)
    // With SaveMode.Overwrite, truncate the existing MySQL table instead of
    // dropping and recreating it (preserves indexes/grants/DDL).
    if ("overwrite".equals(writeType)) {
      prop.put("truncate", "true")
    }

    // If the "delete before write" switch is on, purge historical rows that
    // match the configured WHERE filter before the insert.
    if (ifDeleteOn) {
      log.info("ifdeleteon is true")
      filter = mysqlConfig.filter
      val deleteSql: String = deleteTableQuery(tblName)
      log.info(s"deletesql: ${deleteSql}")
      if (executeSql(jdbcUrl, userName, passWord, deleteSql)) {
        log.info("删除历史数据成功")
      }
    }
    resultDF.write.mode(writeType).jdbc(jdbcUrl, tblName, prop)

    this
  }

  /**
   * Executes one or more semicolon-separated SQL statements as a single JDBC batch.
   *
   * Fixes over the previous version: the Statement and Connection are now closed
   * in a `finally` block (they leaked when a statement failed), the unreachable
   * `false` after `throw e` is gone, and blank fragments (e.g. from a trailing
   * ';') are skipped instead of being batched as empty statements.
   *
   * @return true when the batch executed successfully; any failure is rethrown
   * @throws Exception the original JDBC failure, after logging the stack trace
   */
  def executeSql(url: String, user: String, password: String, sql: String): Boolean = {
    var conn: Connection = null
    var st: java.sql.Statement = null
    try {
      Class.forName("com.mysql.jdbc.Driver")
      conn = getConnection(url, user, password)
      st = conn.createStatement
      // Skip blank fragments so a trailing ';' does not add an empty batch entry.
      sql.split(";", -1).iterator.map(_.trim).filter(_.nonEmpty).foreach(st.addBatch)
      st.executeBatch()
      true
    } catch {
      case e: Exception =>
        e.printStackTrace()
        throw e
    } finally {
      if (st != null) st.close()
      if (conn != null) conn.close()
    }
  }

  /**
   * Builds the DELETE statement used to purge historical rows.
   * The WHERE filter is mandatory: without it the delete would wipe the table.
   *
   * @param tblName target table name
   * @throws Exception when the configured filter is null or blank
   */
  def deleteTableQuery(tblName: String): String = {
    if (filter != null && !filter.trim.equals("")) {
      s"delete from $tblName where $filter"
    } else {
      throw new Exception("删除历史数据开关打开后，where条件不能为空")
    }
  }

  /**
   * Parses the sink configuration, injects the current project id into the
   * metadata parameters, loads module properties and resolves the MySQL
   * datasource metadata.
   */
  def init(kc: KhaosContext, config: JObject): Unit = {
    implicit val formats = DefaultFormats
    mysqlConfig = config.extract[MySQLConfig]
    val PROJECT_ID: Int = kc.conf.getString(SchedulerConstants.PROJECT_ID).toInt
    // Override/insert the project id in the metadata lookup parameters.
    val metaParamsMap: Map[String, Any] =
      mysqlConfig.extender.meta.params.values.updated("project_id", PROJECT_ID)
    import org.json4s.native.Json
    val metaJson: String = Json(DefaultFormats).write(metaParamsMap)

    // Load `module.mysql.sink.*` configuration-file parameters.
    loadProperties(kc)

    // Fetch the datasource metadata (connection info) for the target table.
    mysqlMeta = MetaUtils.getMysqlDs(kc,
      mysqlConfig.db_name,
      mysqlConfig.table_name,
      mysqlConfig.extender.meta.clazz,
      metaJson,
      this)
  }

  /**
   * Renames mapped source columns to their target names and fills configured
   * default values. Target fields without a source mapping become string-typed
   * NULL columns (so na.fill can still supply a default).
   *
   * @param columnInfoMetaList field mapping configuration
   * @param data               source DataFrame
   * @return DataFrame with target column names and defaults applied
   */
  def setDefaultValue(columnInfoMetaList: List[ExtractFieldInfo], data: DataFrame): DataFrame = {
    // One output column per configured target field: either the renamed source
    // column, or a NULL string placeholder when there is no mapping line.
    val projected: List[Column] = columnInfoMetaList.map { ef =>
      if (!ef.from_field.trim.equals("")) data.col(ef.from_field) as ef.field
      else lit(null).cast(StringType) as ef.field
    }

    var value: DataFrame = data.select(projected: _*)

    // Fill defaults; cast to string first so na.fill matches regardless of the
    // source column's original type.
    for (excol <- columnInfoMetaList if excol.field_props.default_value.nonEmpty) {
      val field: String = excol.field
      value = value.withColumn(field, value.col(field).cast(StringType))
      value = value.na.fill(excol.field_props.default_value, Array(field))
    }
    value
  }

  /**
   * Loads `module.mysql.sink.*` configuration-file properties.
   * Currently only the extra JDBC URL parameter string is consumed.
   *
   * @param kc Khaos runtime context providing the configuration
   */
  def loadProperties(kc: KhaosContext): Unit = {
    val mysqlProperties: Map[String, String] = kc.conf.getAllWithPrefix("module.mysql.sink.").toMap
    log.info("MysqlSink Properties")
    mysqlProperties.foreach { case (k, v) => log.info(k + "   " + v) }
    _jdbc_url_param = mysqlProperties.getOrElse(MysqlConstants.MODULE_MYSQL_SINK_JDBC_URL_PARAM, "")
  }

  /**
   * Renames, default-fills and type-casts the DataFrame for the target table,
   * validating NOT NULL columns against the table metadata.
   *
   * NOTE(review): not called from sink() in this file — apparently kept for
   * callers elsewhere; the interface is preserved unchanged.
   *
   * @param data          source DataFrame
   * @param extractFields field mapping configuration
   * @param columnEntiy   target-table column metadata (source of NOT_NULL flags)
   * @throws Exception when an unmapped, default-less target column is NOT NULL
   */
  def convertDataFrame(data: DataFrame, extractFields: List[ExtractFieldInfo], columnEntiy: util.List[DmTableColumn]): DataFrame = {
    // Build [columnName -> NOT_NULL flag ("true"/"false")] from the column params.
    val fieldAndNotNull = columnEntiy.asScala.map { colEntiy =>
      var not_null: String = ""
      colEntiy.getParams.asScala.foreach { map =>
        map.get("pKey") match {
          case "NOT_NULL" => not_null = map.get("pValue")
          case _ =>
        }
      }
      (colEntiy.getColName, not_null)
    }.toMap
    // Keep only columns that have a source mapping, renamed to their target names.
    val colArr = new ArrayBuffer[Column]()
    for (ef <- extractFields if !ef.from_field.trim.equals("")) {
      colArr += data.col(ef.from_field) as ef.field
    }
    var value: DataFrame = data.select(colArr: _*)
    // Default-fill and cast each target column; columns are cast to string
    // first so na.fill applies uniformly.
    for (ef <- extractFields) {
      val to_field: String = ef.field
      val data_type = ef.data_type
      val field_length = ef.length
      val from_field: String = ef.from_field
      val default_value: String = ef.field_props.default_value
      if (!from_field.trim.equals("")) {
        // TIME values must be trimmed, otherwise the MySQL write fails.
        if (data_type.equalsIgnoreCase("TIME")) {
          value = value.withColumn(to_field, trim(value.col(to_field).cast(StringType)))
        } else {
          value = value.withColumn(to_field, value.col(to_field).cast(StringType))
        }
      } else {
        // Unmapped target column: materialize as a NULL string column.
        value = value.withColumn(to_field, lit(null).cast(StringType))
      }
      // Fill the default, or reject an unmapped NOT NULL column with no default.
      if (!default_value.equals("")) {
        value = value.na.fill(default_value, Array(to_field))
      } else if (fieldAndNotNull(to_field).equalsIgnoreCase("true") && from_field.trim.equals("")) {
        log.error(s"目标字段：${to_field}不能为null!")
        throw new Exception(s"目标字段：${to_field}不能为null!")
      }
      // Cast to the type required by the target table.
      if (field_length.isEmpty) {
        value = value.withColumn(to_field, value.col(to_field).cast(getDataType(data_type)))
      } else {
        value = value.withColumn(to_field, value.col(to_field).cast(getDataType(data_type, field_length.get)))
      }
    }
    value
  }

  /**
   * Casts every sink column to its target type (see getDataType).
   * TIME-typed values are additionally trimmed, otherwise the MySQL write fails.
   */
  def convertDataType(sinkSchema: List[ExtractFieldInfo], data: DataFrame): DataFrame = {
    val columns: List[Column] = sinkSchema.map { ef =>
      val target = ef.length match {
        case Some(len) => getDataType(ef.data_type, len)
        case None      => getDataType(ef.data_type)
      }
      val casted = data.col(ef.field).cast(target)
      (if (ef.data_type.equals(ColumnType.TIME)) trim(casted) else casted) as ef.field
    }
    data.select(columns: _*)
  }

  /**
   * Maps a sink column type to a Spark SQL cast type (no length available).
   * Anything unrecognized falls back to string.
   */
  def getDataType(dataType: String): String = dataType match {
    case ColumnType.DATE     => "date"
    case ColumnType.TIME     => "string"
    case ColumnType.DATETIME => "timestamp"
    case _                   => "string"
  }

  /**
   * Maps a sink column type to a Spark SQL cast type, using the field length
   * for decimal precision/scale. Anything unrecognized falls back to string.
   *
   * @param fieldLength precision spec, e.g. "10,2" for decimal columns
   */
  def getDataType(dataType: String, fieldLength: String): String = dataType match {
    case ColumnType.NUMBER   => "long"
    case ColumnType.STRING   => "string"
    case ColumnType.DECIMAL  => s"decimal($fieldLength)"
    case ColumnType.DATE     => "date"
    case ColumnType.TIME     => "string"
    case ColumnType.DATETIME => "timestamp"
    case _                   => "string"
  }
}