package cn.getech.data.development.function

import cn.getech.data.development.sink.jdbc.config.CustomTableConfig
import org.apache.flink.core.io.SimpleVersionedSerializer
import org.apache.flink.streaming.api.functions.sink.filesystem.BucketAssigner
import org.apache.flink.streaming.api.functions.sink.filesystem.bucketassigners.SimpleVersionedStringSerializer
import org.apache.flink.types.Row
import org.slf4j.{Logger, LoggerFactory}

/**
 * csv 动态分区
 * @param conf 配置参数
 */
/**
 * Bucket assigner that derives Hive-style partition paths (`field=value/` segments)
 * for bulk CSV writes to HDFS, driven by the table configuration.
 *
 * @param conf table configuration describing the columns, which of them are
 *             partition columns, and their static/dynamic partition values
 */
class HDFSBulkCSVAssigner(conf: CustomTableConfig) extends BucketAssigner[Row, String] {
  private val logger: Logger = LoggerFactory.getLogger(this.getClass)

  /**
   * Builds the bucket id (relative partition path) for a single record.
   *
   * Non-partitioned tables bucket at the root (empty path). For each partition
   * column the segment value is taken from the record itself when any of these
   * hold: the table uses dynamic partitioning, the configured static value is a
   * `${...}` placeholder (e.g. `${yyyyMMdd}`), or the static value is empty.
   * Otherwise the configured static value is used verbatim.
   *
   * NOTE(review): assumes `bean.partitionValue` is non-null whenever
   * `isDynamicPartition` is false — same assumption as before; confirm upstream.
   */
  override def getBucketId(element: Row, context: BucketAssigner.Context): String = {
    if (!conf.isPartitionTable) {
      ""
    } else {
      val path = (0 until conf.getParamSize).iterator
        .map(i => (i, conf.getParams(i)))
        .collect {
          case (i, bean) if bean.isPartition =>
            // Dynamic partitioning, a ${...} placeholder, and an empty static value
            // all resolve to the record's own field value; only a concrete static
            // value is taken from the configuration. The dynamic check comes first
            // so partitionValue is never dereferenced for dynamic tables.
            val value =
              if (conf.isDynamicPartition ||
                  (bean.partitionValue.startsWith("${") && bean.partitionValue.endsWith("}")) ||
                  bean.partitionValue.isEmpty) {
                element.getField(i)
              } else {
                bean.partitionValue
              }
            s"${bean.field_name}=$value/"
        }
        .mkString
      // getBucketId runs once per record; INFO-level logging here floods the logs
      // on any high-throughput stream, so keep it at guarded DEBUG.
      if (logger.isDebugEnabled) logger.debug(s"path == $path")
      path
    }
  }

  /** Serializer for the String bucket id, required for checkpointing bucket state. */
  override def getSerializer: SimpleVersionedSerializer[String] = SimpleVersionedStringSerializer.INSTANCE
}
