package com.kingsoft.dc.khaos.module.spark.sink

import com.kingsoft.dc.khaos.KhaosContext
import com.kingsoft.dc.khaos.innertype.Schema
import com.kingsoft.dc.khaos.module.spark.constants.SchedulerConstants
import com.kingsoft.dc.khaos.module.spark.metadata.sink.HdfsConfig
import com.kingsoft.dc.khaos.module.spark.util.TechCheckUtils
import org.apache.spark.sql.{DataFrame, SaveMode}
import org.json4s.DefaultFormats
import org.json4s.JsonAST.JObject

/**
  * Sink that writes technical-check "passed" records out to HDFS as a
  * delimited `.dat` file.
  *
  * Created by czc on 2020/12/28.
  */
class HdfsCheckSink extends SinkStrategy {

  /**
    * Data output: persists the given DataFrame to the HDFS path derived
    * from the check configuration carried in the [[KhaosContext]].
    *
    * @param kc        runtime context holding scheduler/job configuration
    * @param module_id id of the module being executed (not used here)
    * @param config    raw module configuration as JSON (not used here)
    * @param schema    logical schema of the data (not used here)
    * @param dataFrame rows that passed the technical check
    * @return this sink instance, to allow chaining
    */
  override def sink(kc: KhaosContext,
                    module_id: String,
                    config: JObject,
                    schema: Schema,
                    dataFrame: DataFrame): this.type = {
    // json4s formats; presumably required implicitly by the config
    // extraction inside TechCheckUtils — TODO confirm before removing.
    implicit val formats: DefaultFormats.type = DefaultFormats

    val checkConfig = TechCheckUtils.getHdfsCheckConfigByKC(kc)

    // Assemble the absolute HDFS target path:
    //   <cluster namespace> / <proxy user> / <output path> / <input name>.dat
    val namespace = kc.conf.getString(SchedulerConstants.CLUSTER_NAMESPACE)
    val proxyUser = kc.conf.getString(SchedulerConstants.PROXY_USER)
    val targetFile = checkConfig.input_dat_name + ".dat"
    val passedDataPath =
      TechCheckUtils.getHdfsPath(namespace, proxyUser, checkConfig.output_path, targetFile)

    // Save the check-passed data, overwriting any previous output at this path.
    TechCheckUtils.saveAsFileAbsPath(dataFrame, passedDataPath, "|@|", SaveMode.Overwrite)
    this
  }
}
