package com.fine.spark.connector.mysql.output

import com.alibaba.fastjson.JSON
import com.alibaba.fastjson.serializer.SerializeFilter
import com.fine.spark.connector.base.beans.BaseConfig
import com.fine.spark.connector.base.worker.BaseWorker
import com.fine.spark.connector.mysql.beans.MysqlOutputConfig
import org.apache.commons.collections.CollectionUtils
import org.apache.spark.sql.{DataFrameWriter, Row, SparkSession}

import java.sql.{Connection, DriverManager, PreparedStatement}
import java.util
import scala.collection.convert.ImplicitConversions._

/**
 * @author jayce
 * @date 2021/11/20 11:04 PM
 * @version 1.0
 */
class MysqlOutputWorker extends BaseWorker {

  /**
   * Executes each configured pre-SQL statement (typically deletes/cleanup)
   * against the target database over a dedicated JDBC connection.
   *
   * Statements run sequentially on a single connection. Both the statement
   * and the connection are closed even when execution fails, so a bad
   * pre-SQL statement cannot leak the JDBC connection.
   *
   * @param mysqlConfig output config providing driver, url, credentials and preSQL list
   */
  def executePreSQL(mysqlConfig: MysqlOutputConfig): Unit = {
    logger.info(s"connect url: ${mysqlConfig.url}")
    Class.forName(mysqlConfig.driver)
    val conn: Connection = DriverManager.getConnection(mysqlConfig.url, mysqlConfig.user, mysqlConfig.password)
    try {
      mysqlConfig.preSQL.foreach { sql =>
        logger.info(s"start execute preSQL: $sql")
        val stmt: PreparedStatement = conn.prepareStatement(sql)
        try {
          val result: Int = stmt.executeUpdate()
          logger.info(s"execute preSQL success, result: $result.")
        } finally {
          // close per-statement even if executeUpdate throws
          stmt.close()
        }
      }
    } finally {
      // always release the connection, including on failure
      conn.close()
    }
  }

  /**
   * 定义处理过程的方法
   *
   * Writes the Spark table `srcTable` to the MySQL table `dstTable` via the
   * JDBC data source, after optionally running the configured pre-SQL
   * (generally used as a delete/cleanup step).
   *
   * @param config expected to be a [[MysqlOutputConfig]]; cast unconditionally
   * @param ss     active SparkSession used to resolve the source table
   */
  override def process(config: BaseConfig)(implicit ss: SparkSession): Unit = {
    val mysqlConfig: MysqlOutputConfig = config.asInstanceOf[MysqlOutputConfig]
    // 执行前置sql逻辑，一般作为删除操作
    // Run pre-SQL first so the write starts from the expected state.
    Option(mysqlConfig.preSQL).filter(pre => CollectionUtils.isNotEmpty(pre))
      .foreach(_ => executePreSQL(mysqlConfig))

    // Start from the config's string-valued fields; explicitly supplied jdbc
    // options override them.
    val filtered: util.HashMap[String, String] = filterValues(mysqlConfig)
    if (mysqlConfig.options != null && mysqlConfig.options.nonEmpty) {
      for ((key, v) <- mysqlConfig.options) {
        filtered.put(key, v)
        logger.info(s"use jdbc opts - $key -> $v")
      }
    }

    logger.info(s"jdbc output, start save '${mysqlConfig.srcTable}' to '${mysqlConfig.dstTable}'")
    val t1 = System.currentTimeMillis()
    val dfWriter: DataFrameWriter[Row] = ss.table(mysqlConfig.srcTable).write.mode(mysqlConfig.mode).format("jdbc")
    filtered.put("dbtable", mysqlConfig.dstTable)
    dfWriter.options(filtered).save()
    // FIX: was '$mysqlConfig.srcTable', which interpolated the whole config's
    // toString followed by the literal text ".srcTable" (same for dstTable).
    logger.info(s"jdbc output, save '${mysqlConfig.srcTable}' to '${mysqlConfig.dstTable}' success cost ${System.currentTimeMillis() - t1}.")
  }

  /**
   * spark options 处理只支持 string 值
   *
   * Serializes the config bean to JSON and keeps only string-valued fields,
   * because Spark's DataFrameWriter options accept string values only.
   *
   * @param item config bean to flatten
   * @return mutable map containing only the bean's string-typed fields
   */
  def filterValues(item: BaseConfig): java.util.HashMap[String, String] = {
    val json = JSON.toJSONString(item, new Array[SerializeFilter](0))
    val baseMap = JSON.parseObject(json, classOf[java.util.HashMap[String, Object]])
    val res = new java.util.HashMap[String, String]()
    baseMap.foreach { case (key, value) =>
      value match {
        case str: String => res.put(key, str)
        case _ => // drop non-string values (numbers, booleans, nested objects)
      }
    }
    res
  }
}
