package cn.ipanel.bigdata.utils

import org.apache.spark.sql.{Column, DataFrame}
import org.apache.spark.sql.functions._

/**
 * Author: lzz
 * Date: 2021/12/21 16:21
 */
object Condition {

  /**
   * Unions all given DataFrames into one.
   *
   * Semantics preserved from the original implementation:
   *  - while the accumulator is still null or empty it is simply replaced by
   *    the next argument (even if that argument is itself null or empty);
   *  - once a non-empty frame is found, subsequent null/empty frames are skipped.
   *
   * @param data frames to combine; entries may be null or empty
   * @return the union of the non-empty frames, or the last replacement argument
   *         (possibly null or empty) when no non-empty frame exists
   */
  def union(data: DataFrame*): DataFrame =
    data.foldLeft(null: DataFrame) { (acc, d) =>
      if (acc == null || Util.DF.isEmpty(acc)) d
      // unionAll is the Spark 1.x API (renamed union in 2.x); positional, like SQL UNION ALL
      else if (d != null && Util.DF.nonEmpty(d)) acc.unionAll(d)
      else acc
    }

  /**
   * Builds `colName = v1 OR colName = v2 OR ...` over all values.
   *
   * @param colName column to test
   * @param colVal  values to compare against
   * @return the combined predicate, or null when colVal is empty
   *         (original null-returning behavior preserved)
   */
  def or(colName: String, colVal: Seq[Any]): Column =
    colVal.map(v => col(colName) === lit(v)).reduceOption(_ or _).orNull

  /**
   * Builds `colName = v1 AND colName = v2 AND ...` over all values.
   *
   * NOTE(review): with two or more *distinct* values this predicate can never
   * be true (a column cannot equal both at once); kept as-is since callers
   * may rely on the current behavior.
   *
   * @return the combined predicate, or null when colVal is empty
   */
  def and(colName: String, colVal: Seq[Any]): Column =
    colVal.map(v => col(colName) === lit(v)).reduceOption(_ and _).orNull

  /**
   * Builds `c1 = colVal AND c2 = colVal AND ...` over all column names.
   *
   * @return the combined predicate, or null when colName is empty
   */
  def and(colName: Seq[String], colVal: Any): Column =
    colName.map(c => col(c) === lit(colVal)).reduceOption(_ and _).orNull

  // --- Column predicate / arithmetic builders --------------------------------

  def ==(colName: String, colVal: Any): Column = col(colName) === lit(colVal)
  // !== is the Spark 1.x inequality operator (deprecated in 2.x in favour of =!=)
  def !=(colName: String, colVal: Any): Column = col(colName) !== lit(colVal)
  def >(colName: String, colVal: Any): Column = col(colName) > lit(colVal)
  def >=(colName: String, colVal: Any): Column = col(colName) >= lit(colVal)
  def <(colName: String, colVal: Any): Column = col(colName) < lit(colVal)
  def <=(colName: String, colVal: Any): Column = col(colName) <= lit(colVal)
  def notNull(colName: String): Column = col(colName).isNotNull
  def isNull(colName: String): Column = col(colName).isNull
  def isNan(colName: String): Column = col(colName).isNaN
  def --(colName1: String, colName2: String): Column = col(colName1) - col(colName2)
  def ++(colName1: String, colName2: String): Column = col(colName1) + col(colName2)

  // --- SQL fragment builders (for string-based where/filter expressions) -----
  // No quoting or escaping is applied; colName must be a trusted identifier.

  def gth(colName: String, colVal: Int): String = s"$colName>$colVal"
  def geq(colName: String, colVal: Int): String = s"$colName>=$colVal"
  def beq(colName: String, colVal: Int): String = s"$colName=$colVal"
  def neq(colName: String, colVal: Int): String = s"$colName!=$colVal"
  def lth(colName: String, colVal: Int): String = s"$colName<$colVal"
  def leq(colName: String, colVal: Int): String = s"$colName<=$colVal"

  def gth(colName: String, colVal: Long): String = s"$colName>$colVal"
  def geq(colName: String, colVal: Long): String = s"$colName>=$colVal"
  // Consistently named Long overloads matching the Int versions above.
  def beq(colName: String, colVal: Long): String = s"$colName=$colVal"
  def neq(colName: String, colVal: Long): String = s"$colName!=$colVal"
  // Legacy names with inconsistent casing — kept for source compatibility; prefer beq/neq.
  def bEq(colName: String, colVal: Long): String = s"$colName=$colVal"
  def nEq(colName: String, colVal: Long): String = s"$colName!=$colVal"
  def lth(colName: String, colVal: Long): String = s"$colName<$colVal"
  def leq(colName: String, colVal: Long): String = s"$colName<=$colVal"
}