package com.kingsoft.dc.khaos.module.spark.preprocess.transform

import com.kingsoft.dc.khaos.KhaosContext
import com.kingsoft.dc.khaos.innertype.Schema
import com.kingsoft.dc.khaos.metadata.{Dependency, KhaosStructField}
import com.kingsoft.dc.khaos.module.spark.metadata.preprocess.transform.FilterInfo
import com.kingsoft.dc.khaos.module.spark.util.DataframeUtils
import com.kingsoft.dc.khaos.util.Logging
import org.apache.spark.sql.DataFrame
import org.json4s.DefaultFormats
import org.json4s.jackson.JsonMethods.parse

/**
  * create by yansu on 2019/07/25 10:24
  *
  * Filter operator: applies a SQL-style filter expression from the module
  * config to the single upstream DataFrame, optionally limits the row count,
  * and optionally caches / repartitions the result before registering it
  * for the downstream target.
  */
class Filter extends TransformStrategy with Logging {
  // Parsed module config. Populated in exec() and read afterwards by schema().
  // NOTE(review): schema() NPEs if called before exec() — pre-existing contract,
  // kept as-is; confirm the framework always invokes exec() first.
  private var filterInfo: FilterInfo = _

  /**
    * Executes the filter transform.
    *
    * @param kc          runtime context providing upstream data and schema channels
    * @param module_id   id of this module instance (unused here, part of the strategy interface)
    * @param config      JSON config string, deserialized into [[FilterInfo]]
    * @param dependences upstream edges; only the head is consumed
    * @param targets     downstream edges; the result is registered on the head
    * @return the (edge, DataFrame) results produced by addResult
    * @throws Exception if the mandatory filter condition is missing or empty
    */
  override def exec(kc: KhaosContext,
                    module_id: String,
                    config: String,
                    dependences: Seq[Dependency],
                    targets: Seq[Dependency]): Seq[(String, DataFrame)] = {

    // Parse the JSON config (second arg: use BigDecimal when parsing doubles).
    implicit val formats = DefaultFormats
    filterInfo = parse(config, true).extract[FilterInfo]

    val filterDataFrame = kc.structData[DataFrame](dependences.head.getEdge())._2

    // The filter condition is mandatory: fail fast when absent.
    val filter = filterInfo.filter
    if (filter == null || filter == "") {
      log.error("can't find filter condition!")
      throw new Exception("can't find filter condition!")
    }

    // Apply the filter; the row limit is optional.
    log.info("开启Filter算子...")
    val filterLimit = filterInfo.limit.getOrElse("")
    // FIX: the old guard `filterLimit.toString != null` was always true
    // (toString never yields null); only the non-empty check is meaningful.
    var resDataFrame =
      if (filterLimit.toString != "") {
        log.info("limit ==> " + filterLimit.toString)
        // may throw NumberFormatException if the configured limit is not numeric
        filterDataFrame.filter(filter).limit(filterLimit.toInt)
      } else {
        filterDataFrame.filter(filter)
      }

    // Optional caching of the result.
    if ("true".equals(filterInfo.advanced_options.cache_option.on_off)) {
      resDataFrame = DataframeUtils.cacheDataframe(resDataFrame, this)
    }
    // Optional repartitioning of the result.
    if ("true".equals(filterInfo.advanced_options.partition.on_off)) {
      resDataFrame = DataframeUtils.repartionDataframe(resDataFrame, filterInfo.advanced_options.partition.nums.toInt, this)
    }
    addResult(targets.head, resDataFrame)
  }

  /**
    * Returns the output schema of this operator.
    *
    * When the config declares no extracted fields, the upstream schema is
    * passed through unchanged; otherwise a new schema is built from the
    * configured (field, data_type) pairs.
    *
    * NOTE(review): relies on filterInfo set by exec(); see field comment above.
    */
  override def schema(kc: KhaosContext,
                      module_id: String,
                      config: String,
                      dependences: Seq[Dependency]) = {

    if (filterInfo.extract_fields.isEmpty) {
      // Pass-through: no field projection configured.
      kc.schemaChannel.getSchema(dependences.head.getSource())
    } else {
      // Build the declared schema from the configured extract fields.
      val schemaList = filterInfo.extract_fields.map { optInfo =>
        KhaosStructField(optInfo.field, optInfo.data_type)
      }
      new Schema(schemaList)
    }
  }
}