package cn.getech.data.development.bean

import cn.getech.data.development.bean.jdbc.{JDBCFieldBean, JDBCSqlParserResourceBean, KafkaResourceBean}
import cn.getech.data.development.enums.{DelimitFormat, FlinkStreamStartUpMode, OutputFormat}
import com.alibaba.fastjson.{JSON, JSONObject}
import org.slf4j.{Logger, LoggerFactory}

/**
 * Real-time sync job descriptor (实时同步).
 *
 * Parses a Flink streaming job definition from a JSON file into a Kafka
 * source configuration ([[KafkaResourceBean]]) and a JDBC sink
 * configuration ([[JDBCSqlParserResourceBean]]).
 *
 * @param flinkJson path of the JSON definition file
 */
case class FlinkStreamSyncHiveObj(flinkJson: String) extends FlinkStreamJsonObj {

  var kafkaResource: KafkaResourceBean = new KafkaResourceBean
  // NOTE(review): stays null until jsonParse() has run — callers must parse first.
  var jdbc: JDBCSqlParserResourceBean = null
  var jobName: String = ""
  var consumerByteRate: String = ""

  /**
   * Loads the JSON file referenced by `flinkJson` and populates
   * `jobName`, `consumerByteRate`, `kafkaResource` and `jdbc`.
   *
   * Fails fast with a descriptive error when the mandatory
   * "kafkaResource" / "resource" objects are absent, instead of a
   * bare NullPointerException further down.
   */
  override def jsonParse: Unit = {
    val objs = load(flinkJson)
    jobName = objs.getString("flinkJobName")
    consumerByteRate = objs.getString("consumerByteRate")

    val kafkaObj = objs.getJSONObject("kafkaResource")
    require(kafkaObj != null, s"missing 'kafkaResource' object in $flinkJson")
    kafkaResource.typename = kafkaObj.getString("typename")
    kafkaResource.topic = kafkaObj.getString("topic")
    kafkaResource.startUp = kafkaObj.getString("startUp")
    kafkaResource.startupMode = FlinkStreamStartUpMode.withName(kafkaResource.startUp)
    kafkaResource.params = jsonArray(kafkaObj, "fields")
    kafkaResource.format = kafkaObj.getString("format")
    kafkaResource.formatType = OutputFormat.withName(kafkaResource.format)
    kafkaResource.delimitFormat = kafkaObj.getIntValue("delimitFormat")
    kafkaResource.delimitFormatType = DelimitFormat.getChar(kafkaResource.delimitFormat)

    // JDBC generic model data (jdbc通用模型数据)
    val jdbcObj = objs.getJSONObject("resource")
    require(jdbcObj != null, s"missing 'resource' object in $flinkJson")
    jdbc = FlinkStreamJDBCObj.jsonParse(jdbcObj)
  }

  /**
   * Converts the JSON array under `key` into field beans.
   *
   * @param obj parent JSON object
   * @param key name of the array entry (e.g. "fields")
   * @return one [[JDBCFieldBean]] per element; empty when the key is absent
   *         (previously this NPEd on a missing key)
   */
  private def jsonArray(obj: JSONObject, key: String): Array[JDBCFieldBean] = {
    val arr = obj.getJSONArray(key)
    if (arr == null) {
      Array.empty[JDBCFieldBean]
    } else {
      arr.toArray.map { x =>
        // Re-parse each element so it works whether the array holds
        // JSONObjects or raw JSON strings.
        val jsonObj = JSON.parseObject(x.toString)
        JDBCFieldBean(
          jsonObj.getString("field_name"),
          jsonObj.getString("class_type"),
          jsonObj.getBooleanValue("isPartition"),
          // Optional keys default to "" instead of null.
          jsonObj.getOrDefault("parsePartitionPath", "").toString,
          jsonObj.getOrDefault("partitionValue", "").toString
        )
      }
    }
  }

  override def toString: String = {
    // Plain `$jdbc` interpolation is null-safe (prints "null") — the previous
    // `${jdbc.toString}` threw an NPE when toString ran before jsonParse.
    // Also fixed: full-width colon after consumerByteRate → ASCII ':'.
    s"""
       | obj:
       | jobName: ${jobName}
       | consumerByteRate: ${consumerByteRate}
       |
       | kafkaObj: ${kafkaResource}
       |
       | jdbcObj: ${jdbc}
       |""".stripMargin
  }
}
