package com.kingsoft.dc.khaos.module.spark.preprocess.transform

import com.kingsoft.dc.khaos.KhaosContext
import com.kingsoft.dc.khaos.innertype.Schema
import com.kingsoft.dc.khaos.metadata.{Dependency, KhaosStructField}
import com.kingsoft.dc.khaos.module.spark.metadata.preprocess.transform.{JoinInfo}
import com.kingsoft.dc.khaos.module.spark.util.DataframeUtils
import org.apache.spark.sql.{Column, DataFrame}
import org.json4s.DefaultFormats
import org.json4s.jackson.JsonMethods.parse

import scala.collection.mutable.ArrayBuffer

/**
  * Join operator: joins two upstream DataFrames on the configured key pairs
  * and projects the selected left/right columns with `_left`/`_right` suffixes.
  *
  * created by goosoog 2019/7/24.
  */
class Join extends TransformStrategy {

  // Parsed operator configuration; populated from `config` by exec() and schema().
  private var joinInfo: JoinInfo = _

  /**
    * Builds the join condition by AND-ing one equality predicate per configured
    * key pair.
    *
    * @param leftDataFrame  left-side input table
    * @param rightDataFrame right-side input table
    * @return the combined join expression
    * @throws Exception when no join keys are configured, or when a key pair's
    *                   declared data types differ (comparing mismatched types
    *                   would produce silent implicit casts or empty results)
    */
  def concatJoinExprs(leftDataFrame: DataFrame, rightDataFrame: DataFrame): Column = {
    if (joinInfo.join_keys.isEmpty) {
      throw new Exception("==>join连接条件未设置!")
    }
    var joinExprs: Column = null
    for (keys <- joinInfo.join_keys) {
      // Only key pairs with identical declared data types may be joined.
      if (keys.left.data_type != keys.right.data_type) {
        throw new Exception(s"==>join连接字段类型不一致 [left.field=${keys.left.field} left.datatype=${keys.left.data_type}] --> [right.field=${keys.right.field} right.datatype=${keys.right.data_type}]")
      }
      val pairExpr = leftDataFrame(keys.left.field) === rightDataFrame(keys.right.field)
      // Seed with the first predicate, then AND each subsequent one.
      joinExprs = if (joinExprs == null) pairExpr else joinExprs && pairExpr
    }
    joinExprs
  }

  /**
    * Maps the configured join-type string onto a Spark join type.
    * Default `inner`. Must be one of: `inner`, `cross`, `outer`, `full`,
    * `full_outer`, `left`, `left_outer`, `right`, `right_outer`,
    * `left_semi`, `left_anti`.
    *
    * @param join_type raw join-type string from the configuration
    * @return the canonical Spark join type
    * @throws Exception for unsupported join types
    */
  def matchJoinType(join_type: String): String = {
    // BUG FIX: this method previously inspected joinInfo.join_type and ignored
    // its own parameter; it now matches on the argument it is given.
    if (join_type.contains("inner")) {
      "inner"
    } else if (join_type.contains("cross")) {
      "cross"
    } else if (join_type.contains("left") && join_type.contains("semi")) {
      // Must be checked before the plain "left" branches, otherwise
      // "left_semi" would silently degrade to a plain left join.
      "left_semi"
    } else if (join_type.contains("left") && join_type.contains("anti")) {
      // Same ordering constraint as left_semi above.
      "left_anti"
    } else if (join_type.contains("left") && join_type.contains("outer")) {
      "left_outer"
    } else if (join_type.contains("right") && join_type.contains("outer")) {
      "right_outer"
    } else if (join_type.contains("full") && join_type.contains("outer")) {
      "full_outer"
    } else if (join_type.contains("left")) {
      "left"
    } else if (join_type.contains("right")) {
      "right"
    } else if (join_type.contains("full") || join_type.contains("outer")) {
      // Spark treats "full", "outer" and "full_outer" as synonyms.
      "full_outer"
    } else {
      throw new Exception(s"==>不支持的join类型! joinType=${join_type}")
    }
  }

  /**
    * Builds the output projection: each selected left column is aliased with a
    * "_left" suffix and each right column with "_right", which disambiguates
    * columns present on both sides of the join.
    *
    * @param leftDataFrame  left-side input table
    * @param rightDataFrame right-side input table
    * @return the aliased output columns, left fields first
    */
  def buildOutputColumns(leftDataFrame: DataFrame, rightDataFrame: DataFrame): ArrayBuffer[Column] = {
    val outputFields = new ArrayBuffer[Column](joinInfo.left_fields.size + joinInfo.right_fields.size)
    outputFields ++= joinInfo.left_fields.map(f => leftDataFrame(f.field) as f.field + "_left")
    outputFields ++= joinInfo.right_fields.map(f => rightDataFrame(f.field) as f.field + "_right")
    outputFields
  }

  /**
    * Executes the join: parses the configuration, resolves the two upstream
    * DataFrames, joins them on the configured keys/type, projects the output
    * columns and applies the optional cache/repartition advanced options.
    *
    * @param kc          pipeline context used to resolve upstream data
    * @param module_id   id of this module instance
    * @param config      JSON configuration, deserialized into [[JoinInfo]]
    * @param dependences upstream edges; targetSeq "0" is the left table,
    *                    targetSeq "1" the right table
    * @param targets     downstream edges; the result is bound to the head
    * @return the (edge, DataFrame) results registered via addResult
    */
  override def exec(kc: KhaosContext,
                    module_id: String,
                    config: String,
                    dependences: Seq[Dependency],
                    targets: Seq[Dependency]): Seq[(String, DataFrame)] = {
    // Parse the operator configuration.
    implicit val formats = DefaultFormats
    joinInfo = parse(config, true).extract[JoinInfo]
    // Resolve upstream inputs: targetSeq "0" is the left table, "1" the right.
    val (leftSchema, leftDataFrame) = kc.structData[DataFrame](dependences.filter(_.targetSeq.equalsIgnoreCase("0")).head.getEdge())
    val (rightSchema, rightDataFrame) = kc.structData[DataFrame](dependences.filter(_.targetSeq.equalsIgnoreCase("1")).head.getEdge())

    val joinExprs: Column = concatJoinExprs(leftDataFrame, rightDataFrame)
    val joinType: String = matchJoinType(joinInfo.join_type)
    val joinDF = leftDataFrame.join(rightDataFrame, joinExprs, joinType)
    val outputColumns = buildOutputColumns(leftDataFrame, rightDataFrame)
    var finalDF = joinDF.select(outputColumns: _*)

    // Advanced options: optionally cache and/or repartition the result.
    if ("true".equals(joinInfo.advanced_options.cache_option.on_off)) {
      finalDF = DataframeUtils.cacheDataframe(finalDF, this)
    }
    if ("true".equals(joinInfo.advanced_options.partition.on_off)) {
      finalDF = DataframeUtils.repartionDataframe(finalDF, joinInfo.advanced_options.partition.nums.toInt, this)
    }
    addResult(targets.head, finalDF)
  }

  /**
    * Declares the output schema: all selected left fields suffixed "_left"
    * followed by all selected right fields suffixed "_right", mirroring the
    * aliases produced by buildOutputColumns.
    *
    * @param kc           pipeline context (unused here)
    * @param module_id    id of this module instance
    * @param config       JSON configuration, deserialized into [[JoinInfo]]
    * @param dependencies upstream edges (unused here)
    * @return the declared output schema
    */
  override def schema(kc: KhaosContext,
                      module_id: String,
                      config: String,
                      dependencies: Seq[Dependency]): Schema = {
    // BUG FIX: this method previously relied on joinInfo having been set by a
    // prior exec() call and threw a NullPointerException when schema() was
    // invoked first; parse the config here exactly as exec() does.
    implicit val formats = DefaultFormats
    joinInfo = parse(config, true).extract[JoinInfo]

    val schemafields = new ArrayBuffer[KhaosStructField]()
    for (field <- joinInfo.left_fields) {
      schemafields.append(KhaosStructField(field.field + "_left", field.data_type))
    }
    for (field <- joinInfo.right_fields) {
      schemafields.append(KhaosStructField(field.field + "_right", field.data_type))
    }
    new Schema(schemafields)
  }
}
