package cn.getech.data.development.function

import cn.getech.data.development.bean.jdbc.JDBCFieldBean
import cn.getech.data.development.sink.jdbc.config.CustomTableConfig
import cn.getech.data.development.utils.DateUtils
import com.alibaba.fastjson.{JSON, JSONObject}
import org.apache.flink.api.common.functions.MapFunction
import org.apache.flink.types.Row
import org.slf4j.{Logger, LoggerFactory}

/**
 * Parses multi-level (nested) JSON records into Flink [[Row]]s.
 *
 * Example: for input `data: { map: { test: 2020-01-01 }}`, a configured path
 * of `data.map.test` resolves the nested value.
 *
 * @param conf table/field configuration driving the parse
 */
class HDFSJsonSyncMapFunction(conf: CustomTableConfig) extends MapFunction[String, Row] {
  import scala.util.control.NonFatal

  private val logger: Logger = LoggerFactory.getLogger(this.getClass)

  /**
   * Converts one JSON line into a [[Row]] whose arity matches the configured field list.
   *
   * @param value a single JSON document as text
   * @return a Row with one slot per configured field, in config order
   */
  override def map(value: String): Row = {
    val obj = JSON.parseObject(value)
    val params = conf.getParams
    val row = new Row(params.length)

    for (i <- params.indices) {
      val bean: JDBCFieldBean = params(i)
      bean.class_type.toLowerCase match {
        // Types that cannot be mapped directly are coerced to their string form first.
        case "string" | "map<string,string>" | "array<binary>" | "array<bigint>" =>
          // getOrDefault can still yield null for an explicit JSON null, so guard the
          // toString. Catch NonFatal only — the original bare `case _` also swallowed
          // fatal errors like OutOfMemoryError and InterruptedException.
          val str =
            try Option(obj.getOrDefault(bean.field_name, "")).map(_.toString).getOrElse("")
            catch { case NonFatal(_) => "" }
          row.setField(i, getValue(str, bean, obj))
        case _ =>
          row.setField(i, getValue(obj.get(bean.field_name), bean, obj))
      }
    }
    row
  }

  /**
   * Resolves the final value for one field, applying (in order):
   *  1. dotted-path extraction when `parsePartitionPath` is set,
   *  2. collection-timestamp injection for the configured collection-time field,
   *  3. date formatting when `partitionValue` is a `${format}` template.
   *
   * @param text the raw value already pulled from the top-level JSON object
   * @param bean field configuration (name, type, partition settings)
   * @param obj  the full parsed JSON document, needed for nested-path lookups
   * @return the resolved value (may be null when a nested path is absent)
   */
  private def getValue(text: Any, bean: JDBCFieldBean, obj: JSONObject): Any = {
    var field = text

    // Manually walk a dotted JSON path such as "data.map.test" when configured.
    val partitionPath = bean.parsePartitionPath
    if (partitionPath != null && !partitionPath.isEmpty) {
      val paths = partitionPath.split("\\.")
      if (paths.length == 1) {
        field = obj.getString(paths(0))
      } else {
        // Was a bare println; route through the class logger at debug level instead.
        logger.debug("parse path root: {}", obj.getJSONObject(paths(0)))
        var pathObj: JSONObject = obj.getJSONObject(paths(0))
        var leaf: Any = null
        for (j <- 1 until paths.length) {
          // Guard each hop: getJSONObject returns null for a missing key, and the
          // original then NPE'd on the next dereference (and on leaf.toString below).
          if (j < paths.length - 1) {
            if (pathObj != null) pathObj = pathObj.getJSONObject(paths(j))
          } else if (pathObj != null) {
            leaf = pathObj.get(paths(j))
          }
        }
        field = if (leaf == null) null else leaf.toString
      }
    }

    // Automatically inject the collection timestamp for the collection-time field.
    if (bean.field_name == conf.collectionConfig.collectionTimeName) {
      try {
        val format = conf.collectionConfig.collectionTimeFormat.replace("${", "").replace("}", "")
        field = new DateUtils().getPatternTime(format)
      } catch {
        case NonFatal(_) =>
          // Original call lacked the "{}" placeholder, so the bad format was never logged.
          logger.error("采集时间字段format格式输入错误: {}", conf.collectionConfig.collectionTimeFormat)
          field = new DateUtils().getPatternTime("yyyyMMdd HH:mm:ss")
      }
    }

    // A partition value shaped like "${yyyyMMdd}" means "render this field as a date".
    val partitionValue = bean.partitionValue
    if (partitionValue != null && partitionValue.startsWith("${") && partitionValue.endsWith("}")) {
      val format = partitionValue.replace("${", "").replace("}", "")
      try {
        // Original tested `field.toString == null` (always false — toString never
        // returns null) and itself NPE'd when field was null; check field directly.
        val base =
          if (field == null || field.toString.isEmpty) new DateUtils().getPatternTime(format).toString
          else field.toString
        field = new DateUtils().customFormat2String(format, base)
      } catch {
        case NonFatal(_) =>
          logger.error("解析日期格式错误: {}", conf.collectionConfig.collectionTimeFormat)
      }
    }
    field
  }

}
