package com.kingsoft.dc.khaos.module.spark.sink

import java.util
import java.util.Properties

import com.kingsoft.dc.khaos.KhaosContext
import com.kingsoft.dc.khaos.extender.meta.api.DmTableSplit
import com.kingsoft.dc.khaos.extender.meta.model.col.DmTableColumn
import com.kingsoft.dc.khaos.innertype.Schema
import com.kingsoft.dc.khaos.module.spark.constants.CommonConstants
import com.kingsoft.dc.khaos.module.spark.metadata.sink.GreenPlumSinkConfig
import com.kingsoft.dc.khaos.module.spark.model.RelationDataStatusInfo
import com.kingsoft.dc.khaos.module.spark.model.center.metric.SyncProcessDataMetric
import com.kingsoft.dc.khaos.module.spark.request.model.JdbcConnectEntity
import com.kingsoft.dc.khaos.module.spark.util.TableSplitUtils.StrategyValueEnum
import com.kingsoft.dc.khaos.module.spark.util.{CenterMetricUtils, DataframeUtils, TableSplitUtils}
import com.kingsoft.dc.khaos.util.Logging
import org.apache.spark.sql.functions.col
import org.apache.spark.sql.types.{StringType, TimestampType}
import org.apache.spark.sql.{Column, DataFrame, SaveMode}
import org.json4s.{DefaultFormats, JsonAST}
import org.json4s.jackson.JsonMethods.{compact, render}

import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer

/**
 * create by yansu on 2019/12/18 11:06
 */
/**
 * GreenPlum sink strategy.
 *
 * Writes a Spark [[DataFrame]] into a GreenPlum table over JDBC, resolving
 * table-split (sharding) strategies (enum / datetime / business suffix),
 * then reports row counts and sync metrics to the operations center.
 *
 * create by yansu on 2019/12/18 11:06
 */
class GreenPlumSink extends SinkStrategy with Logging {
  private var _gpConfig: GreenPlumSinkConfig = null
  private var _kc: KhaosContext = null
  // JDBC connection attributes, filled by initMetaData() from ds_config
  private var _host: String = ""
  private var _port: String = ""
  private var _username: String = ""
  private var _password: String = ""
  private var _instansename: String = ""
  // Quoted db/table identifiers (see sink())
  private var dbName = ""
  private var tblName = ""
  private var connectEntity: JdbcConnectEntity = null
  private var _gpColsInfo: util.List[DmTableColumn] = null
  private var tableSplit: DmTableSplit = null
  private var tblNameAndDF: mutable.HashMap[String, DataFrame] = null
  // Supported write options mapped to Spark save modes
  private val writeModeMap = Map("append" -> SaveMode.Append,
    "overwrite" -> SaveMode.Overwrite)

  /**
   * Sink entry point: parses the sink config, initializes connection metadata
   * and writes the dataset to the physical GreenPlum table(s).
   *
   * @param kc           Khaos runtime context
   * @param module_id    id of the current module (unused here, part of the strategy interface)
   * @param config       raw JSON sink configuration, extracted to [[GreenPlumSinkConfig]]
   * @param schema       inner schema description (unused here, part of the strategy interface)
   * @param dataSet      data to write
   * @param ds_config    datasource connection properties (host/port/username/password/instansename)
   * @param gp_cols_info target-table column metadata, passed in by the MPP sink
   */
  def sink(kc: KhaosContext,
           module_id: String,
           config: JsonAST.JObject,
           schema: Schema,
           dataSet: DataFrame,
           ds_config: Map[String, String],
           gp_cols_info: util.List[DmTableColumn]): Any = {

    implicit val formats = DefaultFormats
    val gpConfig = config.extract[GreenPlumSinkConfig]
    this._gpConfig = gpConfig
    this._kc = kc
    // Quote db/table names so reserved words and mixed case survive in SQL
    dbName = s"""\"${_gpConfig.db_name}\""""
    tblName = s"""\"${_gpConfig.table_name}\""""

    // Column metadata handed over by the MPP sink
    _gpColsInfo = gp_cols_info
    //    MetaUtils.checkWriteAuth(kc,
    //      gpConfig.db_name,
    //      gpConfig.table_name,
    //      gpConfig.extender.auth.clazz,
    //      compact(render(gpConfig.extender.auth.params)))

    initialize(ds_config)
    writeRealTable(dataSet)
  }

  /**
   * Resolves the physical (possibly split) target tables for `data`,
   * writes each table's partition of the data, then reports row counts
   * and sync metrics.
   *
   * @param data the dataset to distribute and write
   */
  def writeRealTable(data: DataFrame) = {
    // Determine whether the target is a split (sharded) table
    tableSplit = TableSplitUtils.getTableSplit(_kc,
      _gpConfig.db_name,
      _gpConfig.table_name,
      _gpConfig.extender.meta.clazz,
      compact(render(_gpConfig.extender.meta.params)))
    var splitValues: List[String] = null
    var tblList: List[String] = null

    // Apply renames / default values configured for the extract fields
    var colArr = new ArrayBuffer[Column]()
    val sdvData = DataframeUtils.setDefaultValue(_gpConfig.extract_fields, _gpColsInfo, data)
    for (elem <- sdvData.columns) {
      colArr += sdvData.col(elem)
    }
    // Resolve the list of physical split tables, per strategy type
    if (tableSplit != null) {
      tableSplit.getStrategyType match {
        case TableSplitUtils.StrategyTypeEnum.CUSTOM_ENUM => {
          // Strategy value is a comma-separated enumeration of split values
          splitValues = tableSplit.getStrategyValue.split(",").toList
          tblList = TableSplitUtils.getRealTable(_kc,
            _gpConfig.db_name,
            _gpConfig.table_name,
            _gpConfig.extender.meta.clazz,
            compact(render(_gpConfig.extender.meta.params)), this, tableSplit, "in", splitValues)

        }
        case TableSplitUtils.StrategyTypeEnum.DATETIME => {
          // Collect the distinct (non-null) values of the split column as strings
          splitValues = sdvData.select(colArr: _*).select(col(tableSplit.getSplitColName).cast(StringType)).distinct().collect().map(row => {
            row.getAs[String](tableSplit.getSplitColName)
          }).toList.filter(_ != null)

          // Normalize "2019-01-01" to the table-suffix granularity: 2019 / 201901 / 20190101
          tableSplit.getStrategyValue match {
            case StrategyValueEnum.year => splitValues = splitValues.map(_.replaceAll("-", "").substring(0, 4))
            case StrategyValueEnum.month => splitValues = splitValues.map(_.replaceAll("-", "").substring(0, 6))
            case StrategyValueEnum.day => splitValues = splitValues.map(_.replaceAll("-", "").substring(0, 8))
          }
          tblList = TableSplitUtils.getRealTable(_kc,
            _gpConfig.db_name,
            _gpConfig.table_name,
            _gpConfig.extender.meta.clazz,
            compact(render(_gpConfig.extender.meta.params)), this, tableSplit, "in", splitValues)
        }
        case TableSplitUtils.StrategyTypeEnum.BUSSINESS => {
          // Business split: suffix must be explicitly configured and enabled
          var suffixValue = ""
          if (_gpConfig.sub_table.on_off.trim.toLowerCase == "true") {
            suffixValue = _gpConfig.sub_table.suffix
            if (suffixValue == null || suffixValue == "") {
              throw new IllegalArgumentException("未正确填写业务分表后缀!")
            }
          } else {
            throw new IllegalArgumentException("未开启业务分表开关!")
          }
          splitValues = List[String](suffixValue)
          tblList = TableSplitUtils.getRealTable(_kc,
            _gpConfig.db_name,
            _gpConfig.table_name,
            _gpConfig.extender.meta.clazz,
            compact(render(_gpConfig.extender.meta.params)), this, tableSplit, "=", splitValues)
        }
      }
    }
    val decimal_switch: Boolean = _kc.conf.getBoolean(CommonConstants.GREENPLUM_DECIMAL_SWITCH, false)
    log.info(s"greenplum decimal_switch: $decimal_switch")
    val targetData = DataframeUtils.buildNewDataframeGreenplum(
      tblList,
      tableSplit,
      _gpConfig.table_name,
      _gpColsInfo,
      _gpConfig.extract_fields,
      sdvData,
      connectEntity, decimal_switch)._1
    log.info("===> " + targetData.schema.fields.mkString(","))
    // Map each physical table name to the slice of the DataFrame it receives
    tblNameAndDF =
      TableSplitUtils.getSinkRealTable(_kc,
        _gpConfig.db_name,
        _gpConfig.table_name,
        this,
        _gpConfig.extender.meta.clazz,
        compact(render(_gpConfig.extender.meta.params)),
        targetData,
        _gpConfig.sub_table)

    //    tblNameAndDF = tblNameAndDF.map(tp => (tp._1.toLowerCase(), tp._2))
    val gpDataStatusInfo = new RelationDataStatusInfo
    // Loop-invariant: depends only on targetData / column metadata, so compute once
    val numPartition = DataframeUtils.rePartitions(_kc, targetData, _gpColsInfo)
    // Write each physical table and accumulate row counts across all splits
    for (tblDF <- tblNameAndDF) {
      // Wrap the frame with a counting accumulator for data reporting
      val (resultData, accumulator) = DataframeUtils.calculateDataNum(_kc, tblDF._2, "GreenplumSink")
      write2GP(_kc, tblDF._1, resultData, numPartition)
      // Sum counts over all split tables (accumulator is valid after the write action)
      var numTemp: Long = 0
      if (gpDataStatusInfo.getDataNum == null) {
        numTemp = accumulator.value.toLong
      } else {
        numTemp = gpDataStatusInfo.getDataNum.toLong + accumulator.value.toLong
      }
      gpDataStatusInfo.setDataNum(numTemp.toString)
    }
    // Report data status. NOTE: the original compared SaveMode to the string
    // "overwrite", which is always false; compare against SaveMode.Overwrite.
    gpDataStatusInfo.setCover(writeModeMap(_gpConfig.write_option.trim.toLowerCase) == SaveMode.Overwrite)
    DataframeUtils.reportDataStatusRelation(_kc,
      gpDataStatusInfo,
      _gpConfig.db_name,
      _gpConfig.table_name,
      _gpConfig.extender.meta.clazz,
      compact(render(_gpConfig.extender.meta.params)))
    // Report sync metric to the operations center (0 when nothing was written)
    val metric: SyncProcessDataMetric = CenterMetricUtils.buildSyncProcessDataMetric(_kc)
    metric.setProcessDataLValue(Option(gpDataStatusInfo.getDataNum).map(_.toLong).getOrElse(0L))
    CenterMetricUtils.reportSyncProcessData(metric, _kc)
  }


  /**
   * Initializes the JDBC connection entity from the resolved connection
   * attributes (must run after [[initMetaData]]).
   */
  def initJdbcConnectInfo() = {
    log.info("Initialize JDBC Connection!")
    val url = getConnectUrl()
    val user = _username
    val password = _password
    val schema = _gpConfig.db_name
    this.connectEntity = new JdbcConnectEntity(url,
      user,
      password,
      schema,
      _gpConfig.table_name)
  }

  /**
   * Writes one physical table's data to GreenPlum via JDBC.
   *
   * @param kc            KhaosContext (unused here, kept for interface symmetry)
   * @param tableName     physical (split) table name
   * @param data          DataFrame to write
   * @param numPartitions number of write partitions (controls JDBC connection parallelism)
   */
  def write2GP(kc: KhaosContext, tableName: String, data: DataFrame, numPartitions: Int) = {
    val timeNoNullDataFrame = processingTimeType(data)

    log.info("Start Writing GreenPlum Database!")
    val url = getConnectUrl()
    val driver = "org.postgresql.Driver"
    val dbTable = s"${dbName}.${tableName}"
    val pro = new Properties()
    pro.put("driver", driver)
    pro.put("user", _username)
    pro.put("password", _password)
    timeNoNullDataFrame
      .repartition(numPartitions)
      .write
      .mode(writeModeMap.apply(_gpConfig.write_option.trim.toLowerCase))
      .jdbc(url, dbTable.trim, pro)
  }

  /**
   * Casts TIME-typed extract fields (read into the DataFrame as strings)
   * to [[TimestampType]] so JDBC maps them correctly.
   *
   * @param data DataFrame
   * @return DataFrame with TIME columns cast to timestamp
   */
  def processingTimeType(data: DataFrame): DataFrame = {
    var df = data
    for (elem <- _gpConfig.extract_fields) {
      if (elem.data_type.equalsIgnoreCase("TIME")) {
        // (removed a stray `.as(elem.field)` that aliased the whole frame)
        df = df.withColumn(elem.field, df.col(elem.field).cast(TimestampType))
      }
    }
    df
  }

  /**
   * Initializes connection metadata and the JDBC connect entity.
   *
   * @param ds_config datasource connection properties
   */
  def initialize(ds_config: Map[String, String]) = {
    log.info("Initialize The Metadata!")
    initMetaData(ds_config)
    initJdbcConnectInfo()
  }

  /**
   * Builds the PostgreSQL-protocol JDBC URL for the GreenPlum instance.
   *
   * @return the JDBC url
   */
  def getConnectUrl(): String = {
    val host = _host
    val port = _port
    val dbName = _instansename
    val url = s"jdbc:postgresql://${host}:${port}/${dbName}"
    log.info("url ==> " + url)
    url
  }

  /**
   * Resolves the physical connection attributes from the datasource config.
   * NOTE: throws NoSuchElementException if any required key is missing.
   *
   * @param ds_config datasource config (host/port/username/password/instansename)
   */
  def initMetaData(ds_config: Map[String, String]) = {
    log.info("Initializes The Physical Address!")
    //    val className = _gpConfig.extender.meta.clazz
    //    implicit val formats = DefaultFormats
    //    val metaParamsJson: ExtendedMetaParams = _gpConfig.extender.meta.params.extract[ExtendedMetaParams]
    //    val metaParamsStr: String =
    //      s"""
    //         |{"project_id":"${metaParamsJson.project_id}","ds_id":"${metaParamsJson.ds_id}","ds_type":"${metaParamsJson.ds_type}","ds_name":"${metaParamsJson.ds_name}"}
    //              """.stripMargin
    //    val dbname = _gpConfig.db_name
    //    val tblname = _gpConfig.table_name
    //
    //    val gpConnect = MetaUtils
    //      .getGreenPlumMeta(_kc, dbname, tblname, className, metaParamsStr, this)
    //      .getDsGreenPlumConnect
    //
    //    _gpColsInfo = MetaUtils
    //      .getGreenPlumMeta(_kc, dbname, tblname, className, metaParamsStr, this)
    //      .getColumnEntiy
    //    _host = gpConnect.getHost
    //    _port = gpConnect.getPort
    //    _username = gpConnect.getUsername
    //    _password = gpConnect.getPassword
    //    _instansename = gpConnect.getInstanceName
    _host = ds_config("host")
    _port = ds_config("port")
    _username = ds_config("username")
    _password = ds_config("password")
    _instansename = ds_config("instansename")
  }
}
