package com.kingsoft.dc.khaos.module.spark.preprocess.specific

import com.kingsoft.dc.khaos.KhaosContext
import com.kingsoft.dc.khaos.innertype.Schema
import com.kingsoft.dc.khaos.metadata.Dependency
import com.kingsoft.dc.khaos.module.spark.constants.MppConstants
import com.kingsoft.dc.khaos.module.spark.model.cos.CosAccessConfig
import com.kingsoft.dc.khaos.module.spark.preprocess.transform.TransformStrategy
import com.kingsoft.dc.khaos.module.spark.util.CosApiUtils.getCosAccessAkSk
import com.kingsoft.dc.khaos.module.spark.util.HadoopCosUtils
import com.kingsoft.dc.khaos.util.Logging
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.sql.DataFrame

/**
 * Created by yansu on 2019/08/01 20:30.
 */
class ClearMppOnCosData extends TransformStrategy with Logging {
  var _cosConfig = new CosAccessConfig
  // Configuration defaults; overridden from "module.mpp.source.*" properties in loadProperties.
  private var _loginTimeout: Int = 6000
  private var _region = "ap-beijing"
  private var _bucket = "1"
  private var _endpoint = "cos.ap-beijing.myqcloud.com"
  private var _delimeter = ";"

  /**
   * Deletes the per-job MPP staging directory on COS when the configured
   * data source type is "hashdata"; any other data source type is a no-op.
   *
   * @param kc          khaos runtime context (configuration + SparkSession)
   * @param module_id   id of the module being executed (unused here)
   * @param config      module configuration string (unused here)
   * @param dependences upstream dependencies (unused here)
   * @param targets     downstream targets (unused here)
   * @return the accumulated results containing an empty Dependency paired
   *         with a null DataFrame, matching this module's existing contract
   */
  override def exec(kc: KhaosContext,
                    module_id: String,
                    config: String,
                    dependences: Seq[Dependency],
                    targets: Seq[Dependency]): Seq[(String, DataFrame)] = {

    kc.conf.getString("dataSourceType").toLowerCase.trim match {
      case "greenplum" => // do nothing
      case "hashdata" =>
        log.info("开始删除MPP缓存!")
        val jobId = kc.conf.getString("job.inst.id")
        // Load COS credentials/endpoint and merge them into the Hadoop configuration.
        _cosConfig = initCosConfig(kc)
        val hadoopConf = HadoopCosUtils.appendCosHadoopConfigs(kc.sparkSession.sparkContext.hadoopConfiguration, _cosConfig)
        kc.sparkSession.sparkContext.hadoopConfiguration.addResource(hadoopConf)
        val fs: FileSystem = FileSystem.get(kc.sparkSession.sparkContext.hadoopConfiguration)
        // try/finally guarantees the filesystem is closed even when exists/delete
        // throws; the original only closed it on the success path (and its
        // `fs != null` check after use was dead code).
        try {
          val path = new Path(s"/di/.working_output/${jobId}")
          log.info("删除路径: " + path)
          if (fs.exists(path)) {
            // Explicit recursive delete: the staging path is a directory, and the
            // single-argument fs.delete(Path) is deprecated in Hadoop (it delegates
            // to delete(path, true), so behavior is unchanged).
            fs.delete(path, true)
          } else {
            log.info(path + " 不存在")
          }
        } finally {
          log.info("close cos filesystem")
          fs.close()
        }
      case _ => // do nothing
    }
    // Historical contract: this module produces no real DataFrame.
    val df: DataFrame = null
    addResult(Dependency(), df)
  }

  /**
   * Builds the COS access configuration from job properties: loads the
   * "module.mpp.source.*" settings, fetches the AK/SK credential pair,
   * then fills in region, endpoint, bucket and delimiter.
   *
   * @param kc khaos runtime context supplying the job configuration
   * @return the populated COS access configuration
   */
  def initCosConfig(kc: KhaosContext): CosAccessConfig = {
    loadProperties(kc)
    val cosAccessConfig = getCosAccessAkSk(kc)
    cosAccessConfig.setRegion(_region)
    cosAccessConfig.setEndPoint(_endpoint)
    // COS bucket names are globally qualified as "<name>-<appId>".
    cosAccessConfig.setBucket(_bucket + "-" + cosAccessConfig.getAppId)
    cosAccessConfig.setDelimeter(_delimeter)
    cosAccessConfig
  }

  /**
   * Loads the "module.mpp.source.*" properties into this instance's fields.
   * On any read/parse failure the field defaults remain in effect.
   *
   * @param kc khaos runtime context supplying the job configuration
   */
  def loadProperties(kc: KhaosContext): Unit = {
    try {
      val mppProperties: Map[String, String] = kc.conf.getAllWithPrefix("module.mpp.source.").toMap
      _loginTimeout = mppProperties.getOrElse(MppConstants.MODULE_MPP_SOURCE_JDBC_CONNECT_TIMEOUT, MppConstants.DEFAULT_CONNECT_TIMEOUT).toInt
      _bucket = mppProperties.getOrElse(MppConstants.MODULE_MPP_SOURCE_TMP_COS_BUCKET, "none")
      _endpoint = mppProperties.getOrElse(MppConstants.MODULE_MPP_SOURCE_TMP_COS_ENDPOINT, "none")
      _region = mppProperties.getOrElse(MppConstants.MODULE_MPP_SOURCE_TMP_COS_REGION, "none")
      _delimeter = mppProperties.getOrElse(MppConstants.MODULE_MPP_SOURCE_TMP_COS_DELIMETER, "none")
    } catch {
      case e: Exception =>
        // Log the cause instead of silently swallowing it; defaults stay in effect.
        log.error("未读取到MPP配置! 改用默认配置", e)
    }
  }

  /**
   * This module does not produce a schema; always returns an empty one.
   */
  override def schema(kc: KhaosContext,
                      module_id: String,
                      config: String,
                      dependences: Seq[Dependency]): Any = {
    new Schema(Nil)
  }
}
