package com.kingsoft.dc.khaos.module.spark.sink

import java.util
import java.util.Properties

import com.kingsoft.dc.khaos.KhaosContext
import com.kingsoft.dc.khaos.extender.meta.model.col.DmTableColumn
import com.kingsoft.dc.khaos.innertype.Schema
import com.kingsoft.dc.khaos.module.spark.constants.{ColumnType, MetaDataConstants, OracleConstants, SchedulerConstants}
import com.kingsoft.dc.khaos.module.spark.metadata.sink.{ExtractFieldInfo, OracleConfig}
import com.kingsoft.dc.khaos.module.spark.model.center.metric.SyncProcessDataMetric
import com.kingsoft.dc.khaos.module.spark.model.{MetaDataEntity, RelationDataStatusInfo}
import com.kingsoft.dc.khaos.module.spark.util.{CenterMetricUtils, DataframeUtils, MetaUtils, TableSplitUtils}
import com.kingsoft.dc.khaos.util.Logging
import org.apache.spark.sql.functions.lit
import org.apache.spark.sql.types.StringType
import org.apache.spark.sql.{Column, DataFrame}
import org.json4s.DefaultFormats
import org.json4s.JsonAST.JObject
import org.json4s.jackson.JsonMethods.{compact, render}

import scala.collection.JavaConverters._

import scala.collection.mutable.ArrayBuffer

/**
  * Created by WANGYING15 on 2019/6/13.
  */
/**
  * Sink strategy that writes a Spark DataFrame into an Oracle table over JDBC.
  *
  * Flow: extract the module config, resolve datasource metadata, rename/default
  * the columns, cast them to the target Oracle types, then write via
  * `DataFrameWriter.jdbc`.
  *
  * Created by WANGYING15 on 2019/6/13.
  */
class OracleSinkDbs extends SinkStrategy with Logging {

  // Oracle datasource metadata, resolved from the meta service in init().
  private var oracleMeta: MetaDataEntity = null
  // Module configuration, extracted from the JSON config in init().
  private var oracleConfig: OracleConfig = null
  // Enterprise-cloud (China Life) customisation switch: when true the configured
  // db_name is used as the target schema, otherwise the JDBC username is.
  private var _dbname_type_switch = false

  /**
    * Write the DataFrame to the configured Oracle table.
    *
    * @param kc        runtime context (configuration, accumulators)
    * @param module_id id of this module instance (unused here)
    * @param config    raw JSON config, parsed into [[OracleConfig]] by init()
    * @param schema    inner schema descriptor (unused here)
    * @param dataFrame data to be written
    * @return this, for chaining
    */
  override def sink(kc: KhaosContext,
                    module_id: String,
                    config: JObject,
                    schema: Schema,
                    dataFrame: DataFrame): this.type = {

    init(kc, config)

    val connect = oracleMeta.dsOracleConnect
    val host = connect.getHost
    val connectType = connect.getConnectType
    val userName = connect.getUsername
    val passWord = connect.getPassword
    val instanceName = connect.getInstanceName

    // China Life (enterprise cloud) customisation: pick the target schema.
    // By default Oracle schemas are named after the connecting user.
    val dbName = if (_dbname_type_switch) oracleConfig.db_name else userName

    val tblName = oracleConfig.table_name
    val writeType = oracleConfig.write_option.toLowerCase
    val extractFields = oracleConfig.extract_fields

    // Rename/default the columns, then cast them to the target Oracle types.
    val resultDF = convertDataType(extractFields, setDefaultValue(extractFields, dataFrame))

    val url = getOracleConnectTypeURL(host, instanceName, connectType)
    log.info(s"Oracle jdbcUrl is: $url")
    val prop = new Properties
    prop.put("driver", "oracle.jdbc.driver.OracleDriver")
    prop.put("user", userName)
    prop.put("password", passWord)
    // Workaround for the ojdbc6 driver: an OS user name longer than 30
    // characters makes the connection fail, so truncate it.
    val osuser = System.getProperty("user.name")
    if (osuser.length > 30)
      prop.put("oracle.jdbc.v$session.osuser", osuser.substring(0, 30))

    // When save mode is "overwrite", TRUNCATE the existing Oracle table instead
    // of dropping and re-creating it (preserves grants, indexes and DDL).
    if ("overwrite".equals(writeType)) {
      prop.put("truncate", "true")
    }

    // Wrap the DataFrame so the written row count is accumulated for metrics;
    // the accumulator itself is not needed here.
    val (resultData, _) = DataframeUtils.calculateDataNum(kc, resultDF, "OracleSink")

    resultData.write.mode(writeType).jdbc(url, s"$dbName.$tblName", prop)

    this
  }

  /**
    * Parse the module config and resolve the Oracle datasource metadata.
    * Populates `oracleConfig`, `_dbname_type_switch` and `oracleMeta`.
    */
  def init(kc: KhaosContext, config: JObject): Unit = {
    implicit val formats = DefaultFormats
    oracleConfig = config.extract[OracleConfig]

    val PROJECT_ID: Int = kc.conf.getString(SchedulerConstants.PROJECT_ID).toInt
    // Inject the runtime project id into the meta-service request parameters.
    val metaParamsMap: Map[String, Any] =
      oracleConfig.extender.meta.params.values.updated("project_id", PROJECT_ID)
    import org.json4s.native.Json
    val metaJson: String = Json(DefaultFormats).write(metaParamsMap)

    // Load switches from the module configuration file.
    loadProperties(kc)

    // Resolve connection metadata for the target Oracle datasource.
    oracleMeta = MetaUtils.getOracleDs(kc,
      oracleConfig.db_name,
      oracleConfig.table_name,
      oracleConfig.extender.meta.clazz,
      metaJson,
      this)
  }


  /**
    * Load Oracle module switches from the runtime configuration.
    *
    * NOTE(review): the prefix is "module.oracle.source." even though this is a
    * sink — presumably source and sink deliberately share one switch namespace;
    * confirm before changing.
    *
    * @param kc runtime context carrying the configuration
    */
  def loadProperties(kc: KhaosContext): Unit = {
    val oracleProperties: Map[String, String] = kc.conf.getAllWithPrefix("module.oracle.source.").toMap
    log.info("OracleSink Properties")
    oracleProperties.foreach { case (k, v) => log.info(k + "   " + v) }
    _dbname_type_switch = oracleProperties.getOrElse(OracleConstants.MODULE_ORACLE_SOURCESINK_DBNAMETYPE_SWITCH, "false").toBoolean
  }


  /**
    * Rename mapped columns to their target names and fill configured defaults.
    *
    * Fields with no source mapping (empty `from_field`) become NULL string
    * columns; fields with a non-empty default are cast to string and
    * null-filled with that default.
    */
  def setDefaultValue(columnInfoMetaList: List[ExtractFieldInfo], data: DataFrame): DataFrame = {
    // Project only the configured target columns: mapped ones are renamed,
    // unmapped ones become NULL string columns.
    val projected: List[Column] = columnInfoMetaList.map { ef =>
      if (ef.from_field.trim.nonEmpty) data.col(ef.from_field) as ef.field
      else lit(null).cast(StringType) as ef.field
    }
    val selected = data.select(projected: _*)

    // Apply defaults: cast to string first so na.fill targets the column.
    columnInfoMetaList
      .filter(_.field_props.default_value.nonEmpty)
      .foldLeft(selected) { (df, ef) =>
        df.withColumn(ef.field, df.col(ef.field).cast(StringType))
          .na.fill(ef.field_props.default_value, Array(ef.field))
      }
  }

  /**
    * Build the Oracle JDBC URL for the given connect type. `host` already
    * carries "ip:port" (comma-separated list for RAC).
    *
    * @throws IllegalArgumentException for an unsupported connect type
    *         (previously this silently returned null, which surfaced later as
    *         an obscure NPE inside the JDBC writer)
    */
  def getOracleConnectTypeURL(host: String, instanceName: String, connectType: String): String =
    connectType match {
      case "SID" =>
        // host is "ip:port", so appending ":SID" completes the thin URL.
        s"jdbc:oracle:thin:@$host:$instanceName"
      case "ServiceName" =>
        s"jdbc:oracle:thin:@//$host/$instanceName"
      case "RAC" =>
        // One ADDRESS clause per node: (ADDRESS=(PROTOCOL=TCP)(HOST=x)(PORT=y))
        val addressList = host.split(",").map { node =>
          val parts = node.split(":")
          s"(ADDRESS=(PROTOCOL=TCP)(HOST=${parts(0)})(PORT=${parts(1)}))"
        }.mkString
        s"jdbc:oracle:thin:@(DESCRIPTION=(ADDRESS_LIST=$addressList)(LOAD_BALANCE=yes)(FAILOVER=ON)(CONNECT_DATA=(SERVER=DEDICATED)(SERVICE_NAME=$instanceName)))"
      case other =>
        throw new IllegalArgumentException(s"Unsupported Oracle connect type: $other")
    }

  /**
    * Convert the DataFrame for writing: rename mapped columns, fill defaults,
    * enforce not-null metadata constraints and cast to the target types.
    *
    * @param data          input DataFrame
    * @param extractFields field mapping/typing configuration
    * @param columnEntiy   target-table column metadata (carries NOT_NULL flags)
    * @return the converted DataFrame
    * @throws Exception when an unmapped NOT_NULL target field has no default
    */
  def convertDataFrame(data: DataFrame, extractFields: List[ExtractFieldInfo], columnEntiy: util.List[DmTableColumn]): DataFrame = {
    // Map of column name -> NOT_NULL flag ("true"/"false"/"" when absent).
    // Last matching param wins, mirroring the original accumulation order.
    val fieldAndNotNull: Map[String, String] = columnEntiy.asScala.map { colEntity =>
      val notNull = colEntity.getParams.asScala
        .filter(p => "NOT_NULL" == p.get("pKey"))
        .lastOption
        .map(_.get("pValue"))
        .getOrElse("")
      (colEntity.getColName, notNull)
    }.toMap

    // Select only the columns with a source mapping, renamed to target names.
    val mappedCols: List[Column] = for {
      ef <- extractFields
      if ef.from_field.trim.nonEmpty
    } yield data.col(ef.from_field) as ef.field
    var value: DataFrame = data.select(mappedCols: _*)

    // Fill defaults and cast to the target type; values are first normalised
    // to string so na.fill can apply the (string) default.
    for (ef <- extractFields) {
      val toField = ef.field
      val fromField = ef.from_field
      val defaultValue = ef.field_props.default_value
      value =
        if (fromField.trim.nonEmpty) value.withColumn(toField, value.col(toField).cast(StringType))
        else value.withColumn(toField, lit(null).cast(StringType))

      if (!defaultValue.equals("")) {
        value = value.na.fill(defaultValue, Array(toField))
      } else if (fieldAndNotNull.getOrElse(toField, "").equalsIgnoreCase("true") && fromField.trim.equals("")) {
        // Unmapped NOT_NULL column with no default: refuse to write NULLs.
        // (getOrElse guards fields missing from the metadata map, which used
        // to throw NoSuchElementException instead of this intended error.)
        log.error(s"目标字段：${toField}不能为null!")
        throw new Exception(s"目标字段：${toField}不能为null!")
      }

      // Cast to the type the target table expects.
      value = ef.length match {
        case Some(len) => value.withColumn(toField, value.col(toField).cast(getDataType(ef.data_type, len)))
        case None      => value.withColumn(toField, value.col(toField).cast(getDataType(ef.data_type)))
      }
    }
    value
  }

  /**
    * Cast every configured column to its target type; see getDataType() for
    * the concrete type mapping.
    */
  def convertDataType(sinkSchema: List[ExtractFieldInfo], data: DataFrame): DataFrame = {
    val columns: List[Column] = sinkSchema.map { ef =>
      val targetType = ef.length match {
        case Some(len) => getDataType(ef.data_type, len)
        case None      => getDataType(ef.data_type)
      }
      data.col(ef.field).cast(targetType)
    }
    data.select(columns: _*)
  }

  /**
    * Map a configured column type to a Spark SQL type name (no length).
    * Spark supports: string, boolean, byte, short, int, long, float, double,
    * decimal, date, timestamp.
    */
  def getDataType(dataType: String): String =
    dataType match {
      case ColumnType.DATE     => "date"
      case ColumnType.TIME     => "string" // Spark has no standalone TIME type
      case ColumnType.DATETIME => "timestamp"
      case _                   => "string"
    }

  /**
    * Map a configured column type to a Spark SQL type name, applying the
    * configured length/precision to decimal types.
    */
  def getDataType(dataType: String, fieldLength: String): String =
    dataType match {
      case ColumnType.NUMBER   => s"decimal($fieldLength)"
      case ColumnType.STRING   => "string"
      case ColumnType.DECIMAL  => s"decimal($fieldLength)"
      case ColumnType.DATE     => "date"
      case ColumnType.TIME     => "string" // Spark has no standalone TIME type
      case ColumnType.DATETIME => "timestamp"
      case _                   => "string"
    }
}