package com.kingsoft.dc.khaos.module.spark.preprocess.transform

import com.alibaba.fastjson.{JSON, JSONArray}
import com.kingsoft.dc.khaos.KhaosContext
import com.kingsoft.dc.khaos.innertype.Schema
import com.kingsoft.dc.khaos.metadata.{Dependency, KhaosStructField}
import com.kingsoft.dc.khaos.module.spark.metadata.preprocess.transform.{FieldsInfo, UnionInfo}
import com.kingsoft.dc.khaos.module.spark.util.DataframeUtils
import com.kingsoft.dc.khaos.util.Logging
import org.apache.spark.sql.{Column, DataFrame}
import org.json4s.DefaultFormats
import org.json4s.jackson.JsonMethods.parse

import scala.collection.mutable.ArrayBuffer

/**
  * create by yansu on 2019/07/17 20:21
  */
/**
  * create by yansu on 2019/07/17 20:21
  *
  * Union operator: vertically concatenates the dataframes of all dependencies.
  * Each entry of the "union_tables" config array lists the fields to project
  * from the corresponding input table before the union. After the union the
  * result can optionally be cached, repartitioned and deduplicated
  * (union_type == "union"), and every output column is suffixed with "_0".
  */
class Union extends TransformStrategy with Logging {
  // Parsed operator config; populated in exec() and reused by schema().
  private var unionInfo: UnionInfo = _

  override def exec(kc: KhaosContext,
                    module_id: String,
                    config: String,
                    dependences: Seq[Dependency],
                    targets: Seq[Dependency]): Seq[(String, DataFrame)] = {

    // 解析config — parse the operator configuration.
    implicit val formats: DefaultFormats.type = DefaultFormats
    unionInfo = parse(config, true).extract[UnionInfo]

    import org.apache.spark.sql.functions.col

    // Build one projection (list of columns) per entry of "union_tables".
    // NOTE: the previous implementation merged every entry after the first
    // into a single list, which crashed on configs with three or more tables
    // (mismatched column counts in union); keeping one list per entry fixes
    // that while behaving identically for the common two-entry configs.
    val unionTables = JSON.parseObject(config).getJSONArray("union_tables")
    val columnLists = new ArrayBuffer[ArrayBuffer[Column]]()
    val tablesIter = unionTables.iterator()
    while (tablesIter.hasNext) {
      val fields = tablesIter.next().asInstanceOf[JSONArray]
      val columns = new ArrayBuffer[Column]()
      val fieldIter = fields.iterator()
      while (fieldIter.hasNext) {
        val info = parse(fieldIter.next().toString, true).extract[FieldsInfo]
        columns += col(info.field)
      }
      columnLists += columns
    }

    // 根据依赖关系取出df — union requires at least two input tables.
    val depLen = dependences.length
    if (depLen <= 1) {
      println("You cannot use union because you have fewer than two tables")
      throw new IllegalArgumentException("You cannot use union because you have fewer than two tables")
    }
    if (columnLists.isEmpty) {
      throw new IllegalArgumentException("union_tables must declare the fields of at least one input table")
    }

    // Column list for dependency i: entry i of union_tables, or the last
    // declared entry when the config has fewer lists than dependencies
    // (preserves the historical behavior of reusing the second list).
    def columnsFor(i: Int): Seq[Column] = columnLists(math.min(i, columnLists.length - 1))

    //结果df
    println("开启Union算子...")
    var resDataFrame: DataFrame = kc
      .structData[DataFrame](dependences.filter(_.targetSeq.equalsIgnoreCase("0")).head.getEdge())._2
      .select(columnsFor(0): _*)
    for (i <- 1 until depLen) {
      val nextDataFrame = kc
        .structData[DataFrame](dependences.filter(_.targetSeq.equalsIgnoreCase(i.toString)).head.getEdge())._2
        .select(columnsFor(i): _*)
      resDataFrame = resDataFrame.union(nextDataFrame)
    }

    // 缓存功能 — optional caching.
    if ("true".equals(unionInfo.advanced_options.cache_option.on_off)) {
      resDataFrame = DataframeUtils.cacheDataframe(resDataFrame, this)
    }
    // 分区功能 — optional repartitioning.
    if ("true".equals(unionInfo.advanced_options.partition.on_off)) {
      resDataFrame = DataframeUtils.repartionDataframe(resDataFrame, unionInfo.advanced_options.partition.nums.toInt, this)
    }

    // 改名 — suffix every column with "_0" (_0 marks columns from the first table).
    resDataFrame = resDataFrame.columns.foldLeft(resDataFrame) { (df, name) =>
      df.withColumnRenamed(name, name + "_0")
    }

    // 去重 — "union" means UNION DISTINCT; anything else keeps duplicates (UNION ALL).
    if (unionInfo.union_type.equalsIgnoreCase("union")) {
      resDataFrame = resDataFrame.distinct()
    }
    addResult(targets.head, resDataFrame)
  }

  /**
    * Returns the output schema: the field list declared for the first table
    * in the config, or — when that list is empty — the schema of the first
    * dependency as reported by the schema channel.
    */
  override def schema(kc: KhaosContext,
                      module_id: String,
                      config: String,
                      dependences: Seq[Dependency]): Schema = {
    implicit val formats: DefaultFormats.type = DefaultFormats
    // Guard against schema() being invoked before exec(): fall back to
    // parsing the config directly instead of dereferencing a null field.
    val info = if (unionInfo != null) unionInfo else parse(config, true).extract[UnionInfo]
    if (info.union_tables.head.fieldInfo.isEmpty) {
      kc.schemaChannel.getSchema(dependences.head.getSource())
    } else {
      val schemaList = info.union_tables.head.fieldInfo.map { optInfo =>
        KhaosStructField(optInfo.field, optInfo.data_type)
      }
      new Schema(schemaList)
    }
  }
}
