package cn.getech.data.development.function

import java.nio.charset.StandardCharsets

import cn.getech.data.development.sink.jdbc.config.CustomTableConfig
import com.alibaba.fastjson.JSONObject
import org.apache.flink.api.common.serialization.BulkWriter
import org.apache.flink.core.fs.FSDataOutputStream
import org.apache.flink.types.Row
import org.apache.flink.util.Preconditions

/**
 * [[BulkWriter.Factory]] that serializes Flink [[Row]] records as delimited
 * CSV text lines onto an HDFS output stream.
 *
 * @param conf table configuration supplying the column descriptors (with
 *             partition flags) and the field delimiter
 */
class HDFSBulkFormatCSVFactory(conf: CustomTableConfig) extends BulkWriter.Factory[Row] {

  /** Creates a writer bound to the given (non-null) output stream. */
  override def create(out: FSDataOutputStream): BulkWriter[Row] = {
    val writer = new Writer
    writer.init(out)
  }

  /** Writes one CSV line per row; partition columns are skipped. */
  class Writer extends BulkWriter[Row] {
    // CSV bytes are always emitted as UTF-8.
    val charset = StandardCharsets.UTF_8
    var stream: FSDataOutputStream = _

    /** Binds the writer to a non-null output stream and returns `this`. */
    def init(inputStream: FSDataOutputStream): Writer = {
      stream = Preconditions.checkNotNull(inputStream)
      this
    }

    /**
     * Serializes one row: all non-partition fields joined by the configured
     * delimiter and terminated by '\n'.
     *
     * Fix: the previous implementation trimmed exactly ONE trailing character
     * after the loop, which left a partial delimiter on every line whenever
     * `conf.delimitFormat` is longer than one character, and threw
     * StringIndexOutOfBoundsException when every column is a partition column
     * (empty builder). `mkString` handles both cases correctly.
     */
    override def addElement(element: Row): Unit = {
      // String.valueOf matches StringBuilder.append semantics (null -> "null").
      val fields = (0 until conf.getParamSize)
        .filter(i => !conf.getParams(i).isPartition)
        .map(i => String.valueOf(element.getField(i)))
      // Nothing to emit when all columns are partition columns — avoid blank lines.
      if (fields.nonEmpty) {
        val line = fields.mkString(conf.delimitFormat) + "\n"
        this.stream.write(line.getBytes(charset))
      }
    }

    override def flush(): Unit = stream.flush()

    // Per the BulkWriter contract, finish() must flush remaining buffered data
    // but must NOT close the underlying stream — the framework owns it.
    override def finish(): Unit = flush()
  }

}