package com.ctbri.manage.bydeequ.calculate
import com.ctbri.manage.bydeequ.example.Item
import org.apache.spark.sql._
import org.apache.spark.sql.functions._
import org.apache.spark.sql.expressions.Window
import com.amazon.deequ.analyzers.Patterns
import org.apache.spark.sql.types.{BooleanType, DataTypes, LongType, StringType, StructField, StructType}

import java.util._


/**
 * @author songyunlong
 * @createTime 2023/6/26 16:54
 * @description
 */
trait DataListGeneratedBySql{
    /**
     * Selects rows according to the null-ness of a single column.
     *
     * @param meetRequirements true -> keep rows where the column is non-null;
     *                         false -> keep rows where the column is null
     * @param spark active session (unused here; kept for a uniform signature)
     * @param colName column to inspect
     * @param data input DataFrame
     * @return filtered DataFrame
     */
    def staticNull(meetRequirements: Boolean, spark: SparkSession, colName: String, data: DataFrame): DataFrame={ //todo: make the exported columns configurable
        val predicate = if (meetRequirements) col(colName).isNotNull else col(colName).isNull
        data.filter(predicate)
    }

    /**
     * Selects rows by the uniqueness of one column's value (nulls excluded up front).
     *
     * A window partitioned on the column counts occurrences of each value; rows are
     * then kept where the count is exactly 1 (unique) or greater than 1 (duplicated).
     *
     * @param meetRequirements true -> rows whose value occurs exactly once;
     *                         false -> rows whose value is duplicated
     * @param spark active session
     * @param colName column whose uniqueness is checked
     * @param data input DataFrame
     * @return filtered DataFrame without the helper count column
     */
    def staticUniqueofOneColumn(meetRequirements: Boolean, spark: SparkSession, colName: String, data: DataFrame): DataFrame={ //todo: make the exported columns configurable
        val cntCol = s"${colName}_count"
        val counted = data
            .filter(col(colName).isNotNull)
            .select(col("*"), count(col(colName)).over(Window.partitionBy(colName)).alias(cntCol))
        val keep = if (meetRequirements) col(cntCol) === 1 else col(cntCol) > 1
        counted.filter(keep).drop(cntCol)
    }

    /**
     * Computes the minimum of a column.
     *
     * @param meetRequirements must be true (the aggregate has no "failing rows" notion)
     * @param spark active session (unused; kept for a uniform signature)
     * @param colName column to aggregate
     * @param data input DataFrame
     * @return single-row DataFrame holding min(colName)
     */
    def staticMin(meetRequirements: Boolean, spark: SparkSession, colName: String, data: DataFrame): DataFrame={ //todo: make the exported columns configurable
        assert(meetRequirements)
        data.select(min(col(colName)))
    }

    /**
     * Computes the maximum of a column.
     *
     * @param meetRequirements must be true (the aggregate has no "failing rows" notion)
     * @param spark active session (unused; kept for a uniform signature)
     * @param colName column to aggregate
     * @param data input DataFrame
     * @return single-row DataFrame holding max(colName)
     */
    def staticMax(meetRequirements: Boolean, spark: SparkSession, colName: String, data: DataFrame): DataFrame = { //todo: make the exported columns configurable
        assert(meetRequirements)
        data.select(max(col(colName)))
    }

    /**
     * Computes the arithmetic mean of a column.
     *
     * @param meetRequirements must be true (the aggregate has no "failing rows" notion)
     * @param spark active session (unused; kept for a uniform signature)
     * @param colName column to aggregate
     * @param data input DataFrame
     * @return single-row DataFrame holding avg(colName)
     */
    def staticMean(meetRequirements: Boolean, spark: SparkSession, colName: String, data: DataFrame): DataFrame = { //todo: make the exported columns configurable
        assert(meetRequirements)
        data.select(mean(col(colName)))
    }

    /**
     * Computes the sum of a column.
     *
     * @param meetRequirements must be true (the aggregate has no "failing rows" notion)
     * @param spark active session (unused; kept for a uniform signature)
     * @param colName column to aggregate
     * @param data input DataFrame
     * @return single-row DataFrame holding sum(colName)
     */
    def staticSum(meetRequirements: Boolean, spark: SparkSession, colName: String, data: DataFrame): DataFrame = { //todo: make the exported columns configurable
        assert(meetRequirements)
        data.select(sum(col(colName)))
    }

    /**
     * Computes the minimum string length of a column, ignoring null values.
     *
     * Uses Spark's built-in `length` function instead of the previous hand-rolled
     * UDF (which also registered itself globally under "getMinLength" as a side
     * effect on every call). Behavior is equivalent for standard text.
     *
     * @param meetRequirements must be true (the aggregate has no "failing rows" notion)
     * @param spark active session (unused; kept for a uniform signature)
     * @param colName string column to measure
     * @param data input DataFrame
     * @return single-row DataFrame holding min(colName_len)
     */
    def staticHasMinLength(meetRequirements: Boolean, spark: SparkSession, colName: String, data: DataFrame): DataFrame = { //todo: make the exported columns configurable
        assert(meetRequirements)
        data
            .filter(col(colName).isNotNull)
            .withColumn(s"${colName}_len", length(col(colName)))
            .select(min(col(s"${colName}_len")))
    }

    /**
     * Computes the maximum string length of a column, ignoring null values.
     *
     * Uses Spark's built-in `length` function instead of the previous hand-rolled
     * UDF (which was confusingly registered under the name "getMinLength" inside
     * this max-length check, and mutated the session's UDF registry on every call).
     *
     * @param meetRequirements must be true (the aggregate has no "failing rows" notion)
     * @param spark active session (unused; kept for a uniform signature)
     * @param colName string column to measure
     * @param data input DataFrame
     * @return single-row DataFrame holding max(colName_len)
     */
    def staticHasMaxLength(meetRequirements: Boolean, spark: SparkSession, colName: String, data: DataFrame): DataFrame = { //todo: make the exported columns configurable
        assert(meetRequirements)
        data
            .filter(col(colName).isNotNull)
            .withColumn(s"${colName}_len", length(col(colName)))
            .select(max(col(s"${colName}_len")))
    }
    /**
     * Selects rows whose column does / does not match deequ's e-mail pattern.
     * Thin wrapper over [[staticHasPattern]].
     *
     * @param meetRequirements true -> rows matching the e-mail pattern;
     *                         false -> null or non-matching rows
     * @param spark active session
     * @param colName string column to test
     * @param data input DataFrame
     * @return filtered DataFrame
     */
    def staticContainsEmail(meetRequirements: Boolean, spark: SparkSession, colName: String, data: DataFrame): DataFrame={ //todo: make the exported columns configurable
        staticHasPattern(meetRequirements, spark, colName, Patterns.EMAIL.toString(), data)
    }

    /**
     * Selects rows whose column value does / does not fully match a regex.
     *
     * The check uses `String.matches`, i.e. the WHOLE value must match the pattern
     * (not a partial find, which is why Spark's `rlike` is not used here).
     *
     * @param meetRequirements true -> non-null rows that match;
     *                         false -> null rows or non-matching rows
     * @param spark active session (UDF "checkType" is (re)registered on it each call)
     * @param colName string column to test
     * @param pattern Java regular expression
     * @param data input DataFrame
     * @return filtered DataFrame
     */
    def staticHasPattern(meetRequirements: Boolean, spark: SparkSession, colName: String, pattern: String, data: DataFrame): DataFrame={ //todo: make the exported columns configurable
        spark.udf.register("checkType", (value: String) => value.matches(pattern), DataTypes.BooleanType)
        val matched = callUDF("checkType", col(colName))
        if (meetRequirements) {
            // Null values are excluded before the UDF ever sees them.
            data.filter(col(colName).isNotNull && matched.equalTo(true))
        } else {
            // Nulls count as "not matching"; Or short-circuits so the UDF is not
            // evaluated on null input.
            data.filter(col(colName).isNull || matched.equalTo(false))
        }
    }

    /**
     * Selects rows by membership of a column's value in an allowed set.
     * Note: a null value is treated as "contained" (it passes the check).
     *
     * @param meetRequirements true -> rows whose value is in the set or null;
     *                         false -> rows whose non-null value is outside the set
     * @param spark active session (unused; kept for a uniform signature)
     * @param colName column to test
     * @param allowedValues accepted values
     * @param data input DataFrame
     * @tparam B element type of the allowed values
     * @return filtered DataFrame
     */
    def staticIsContainedIn[B](meetRequirements: Boolean, spark: SparkSession, colName: String, allowedValues: Array[B], data: DataFrame): DataFrame={ //todo: make the exported columns configurable
        val allowedOrNull = col(colName).isInCollection(allowedValues) || col(colName).isNull
        val keep = if (meetRequirements) allowedOrNull else !allowedOrNull
        data.filter(keep)
    }

    /**
     * Checks the row-wise constraint colA <= colB.
     * A row violates the constraint when colA > colB OR either side is null.
     *
     * @param meetRequirements true -> rows satisfying colA <= colB (both non-null);
     *                         false -> violating rows
     * @param spark active session (unused; kept for a uniform signature)
     * @param colNameA left-hand column
     * @param colNameB right-hand column
     * @param data input DataFrame
     * @return filtered DataFrame
     */
    def staticIsLessThanOrEqualTo(meetRequirements: Boolean, spark: SparkSession, colNameA: String, colNameB: String, data: DataFrame): DataFrame={ //todo: make the exported columns configurable
        val violated = col(colNameA) > col(colNameB) || col(colNameA).isNull || col(colNameB).isNull
        if (meetRequirements) data.filter(!violated) else data.filter(violated)
    }

    /**
     * Checks the row-wise constraint colA < colB.
     * A row violates the constraint when colA >= colB OR either side is null.
     *
     * @param meetRequirements true -> rows satisfying colA < colB (both non-null);
     *                         false -> violating rows
     * @param spark active session (unused; kept for a uniform signature)
     * @param colNameA left-hand column
     * @param colNameB right-hand column
     * @param data input DataFrame
     * @return filtered DataFrame
     */
    def staticIsLessThan(meetRequirements: Boolean, spark: SparkSession, colNameA: String, colNameB: String, data: DataFrame): DataFrame= { //todo: make the exported columns configurable
        val violated = col(colNameA) >= col(colNameB) || col(colNameA).isNull || col(colNameB).isNull
        if (meetRequirements) data.filter(!violated) else data.filter(violated)
    }

    /**
     * Checks the row-wise constraint colA > colB.
     * A row violates the constraint when colA <= colB OR either side is null.
     *
     * @param meetRequirements true -> rows satisfying colA > colB (both non-null);
     *                         false -> violating rows
     * @param spark active session (unused; kept for a uniform signature)
     * @param colNameA left-hand column
     * @param colNameB right-hand column
     * @param data input DataFrame
     * @return filtered DataFrame
     */
    def staticIsGreaterThan(meetRequirements: Boolean, spark: SparkSession, colNameA: String, colNameB: String, data: DataFrame): DataFrame = { //todo: make the exported columns configurable
        val violated = col(colNameA) <= col(colNameB) || col(colNameA).isNull || col(colNameB).isNull
        if (meetRequirements) data.filter(!violated) else data.filter(violated)
    }

    /**
     * Checks the row-wise constraint colA >= colB.
     * A row violates the constraint when colA < colB OR either side is null.
     *
     * @param meetRequirements true -> rows satisfying colA >= colB (both non-null);
     *                         false -> violating rows
     * @param spark active session (unused; kept for a uniform signature)
     * @param colNameA left-hand column
     * @param colNameB right-hand column
     * @param data input DataFrame
     * @return filtered DataFrame
     */
    def staticIsGreaterThanOrEqualTo(meetRequirements: Boolean, spark: SparkSession, colNameA: String, colNameB: String, data: DataFrame): DataFrame = { //todo: make the exported columns configurable
        val violated = col(colNameA) < col(colNameB) || col(colNameA).isNull || col(colNameB).isNull
        if (meetRequirements) data.filter(!violated) else data.filter(violated)
    }

    /**
     * Selects rows whose column does / does not match deequ's URL pattern.
     * Thin wrapper over [[staticHasPattern]].
     *
     * @param meetRequirements true -> rows matching the URL pattern;
     *                         false -> null or non-matching rows
     * @param spark active session
     * @param colName string column to test
     * @param data input DataFrame
     * @return filtered DataFrame
     */
    def staticContainsURL(meetRequirements: Boolean, spark: SparkSession, colName: String, data: DataFrame): DataFrame = { //todo: make the exported columns configurable
        staticHasPattern(meetRequirements, spark, colName, Patterns.URL.toString(), data)
    }

    /**
     * Selects rows whose column does / does not match a credit-card-number pattern.
     * Thin wrapper over [[staticHasPattern]].
     *
     * Bug fix: the previous hard-coded regex
     * ("^[1-9]\\d{5}(18|19|20)\\d{2}...[0-9Xx]$") validated 18-digit Chinese
     * resident ID-card numbers, not credit cards. deequ's built-in CREDITCARD
     * pattern is used instead so behavior matches the method name. If ID-card
     * validation was actually intended, that belongs in a separately named method.
     *
     * @param meetRequirements true -> rows matching the credit-card pattern;
     *                         false -> null or non-matching rows
     * @param spark active session
     * @param colName string column to test
     * @param data input DataFrame
     * @return filtered DataFrame
     */
    def staticContainsCreditCardNumber(meetRequirements: Boolean, spark: SparkSession, colName: String, data: DataFrame): DataFrame = { //todo: make the exported columns configurable
        this.staticHasPattern(meetRequirements, spark, colName, Patterns.CREDITCARD.toString(), data)
    }

    /**
     * Checks completeness across several columns: a row is complete when NONE of
     * the given columns is null.
     *
     * @param meetRequirements true -> complete rows; false -> rows with at least one null
     * @param spark active session (unused; kept for a uniform signature)
     * @param colNames non-empty set of columns that must all be non-null
     * @param data input DataFrame
     * @return filtered DataFrame
     */
    def staticHaveCompleteness(meetRequirements: Boolean, spark: SparkSession, colNames: Array[String], data: DataFrame): DataFrame = { //todo: make the exported columns configurable
        assert(colNames.nonEmpty)
        // True when at least one of the listed columns is null.
        val anyNull = colNames.map(c => col(c).isNull).reduce(_ || _)
        if (meetRequirements) data.filter(!anyNull) else data.filter(anyNull)
    }

    /**
     * Checks uniqueness of the combination of several columns.
     *
     * Rows where EVERY listed column is null are excluded up front; the remaining
     * rows are keyed by the comma-joined column values and counted per key with a
     * window, then kept where the key occurs exactly once (unique) or more than
     * once (duplicated).
     *
     * @param meetRequirements true -> rows whose column combination is unique;
     *                         false -> rows whose combination is duplicated
     * @param spark active session
     * @param colNames non-empty set of columns forming the uniqueness key
     * @param data input DataFrame
     * @return filtered DataFrame without the helper count column
     */
    def staticHasUniqueness(meetRequirements: Boolean, spark: SparkSession, colNames: Array[String], data: DataFrame): DataFrame = { //todo: make the exported columns configurable
        assert(colNames.nonEmpty)
        // True only when all key columns are null for the row.
        val allNull = colNames.map(c => col(c).isNull).reduce(_ && _)
        // concat_ws never yields null, so it is a safe composite partition key.
        val key = concat_ws(",", colNames.map(c => col(c)): _*)
        val counted = data
            .filter(!allNull)
            .select(col("*"), count(key).over(Window.partitionBy(key)).alias("colNames_count"))
        val keep = if (meetRequirements) col("colNames_count") === 1 else col("colNames_count") > 1
        counted.filter(keep).drop("colNames_count")
    }

    /**
     * Filters non-null rows of one column by a configurable scalar comparison and
     * reports counts.
     *
     * `compareFuncParam` keys (NOTE(review): a missing "thv" key makes `.toDouble`
     * throw a NullPointerException for numeric column types — confirm callers
     * always supply it):
     *  - "compareStyle": one of "lessThan", "lessOrEqualThan", "greaterThan",
     *    "greaterOrEqualThan", "equalsTo"
     *  - "colType": one of "Int", "Double", "Long", "Float", "String"
     *  - "thv": threshold value (parsed as Double for numeric types, compared
     *    lexicographically for "String")
     *
     * Bug fix: the outer match on "compareStyle" previously had no default case,
     * so an unknown style (or an absent key, which java.util.HashMap.get reports
     * as null) raised a scala.MatchError at runtime. It now falls back to false,
     * consistent with the inner `case _ => false` defaults.
     *
     * @param meetRequirements true -> return rows satisfying the comparison;
     *                         false -> return non-null rows NOT satisfying it
     * @param spark active session (UDF "customFunc" is (re)registered on it)
     * @param colName column to compare
     * @param compareFuncParam comparison configuration (see above)
     * @param data input DataFrame
     * @return (filtered rows,
     *          count of non-null rows,
     *          count of non-null rows SATISFYING the comparison — note this is the
     *          "passing" count even when meetRequirements is false, presumably so
     *          callers can derive a pass ratio; TODO confirm)
     */
    def staticHasOneColCompare(meetRequirements: Boolean, spark: SparkSession, colName: String, compareFuncParam: HashMap[String, String], data: DataFrame): (DataFrame, Double, Double)={
        import spark.implicits._
        val nonNullRows = data.filter($"$colName".isNotNull)
        // UDF built on java's UDF1 so it can be registered with an explicit return type.
        import org.apache.spark.sql.api.java.UDF1
        class CompareFunc[B] extends UDF1[B, Boolean] {
            @throws[Exception]
            override def call(col: B): Boolean = compareFuncParam.get("compareStyle") match {
                    case "lessThan" => compareFuncParam.get("colType") match {
                        case "Int" => col.asInstanceOf[Int] < compareFuncParam.get("thv").toDouble
                        case "Double" => col.asInstanceOf[Double] < compareFuncParam.get("thv").toDouble
                        case "Long" => col.asInstanceOf[Long] < compareFuncParam.get("thv").toDouble
                        case "Float" => col.asInstanceOf[Float] < compareFuncParam.get("thv").toDouble
                        case "String" =>col.asInstanceOf[String] < compareFuncParam.get("thv")
                        case _ => false
                    }
                    case "lessOrEqualThan" => compareFuncParam.get("colType") match {
                        case "Int" => col.asInstanceOf[Int] <= compareFuncParam.get("thv").toDouble
                        case "Double" => col.asInstanceOf[Double] <= compareFuncParam.get("thv").toDouble
                        case "Long" => col.asInstanceOf[Long] <= compareFuncParam.get("thv").toDouble
                        case "Float" => col.asInstanceOf[Float] <= compareFuncParam.get("thv").toDouble
                        case "String" => col.asInstanceOf[String] <= compareFuncParam.get("thv")
                        case _ => false
                    }
                    case "greaterThan" => compareFuncParam.get("colType") match {
                        case "Int" => col.asInstanceOf[Int] > compareFuncParam.get("thv").toDouble
                        case "Double" => col.asInstanceOf[Double] > compareFuncParam.get("thv").toDouble
                        case "Long" => col.asInstanceOf[Long] > compareFuncParam.get("thv").toDouble
                        case "Float" => col.asInstanceOf[Float] > compareFuncParam.get("thv").toDouble
                        case "String" => col.asInstanceOf[String] > compareFuncParam.get("thv")
                        case _ => false
                    }
                    case "greaterOrEqualThan" => compareFuncParam.get("colType") match {
                        case "Int" => col.asInstanceOf[Int] >= compareFuncParam.get("thv").toDouble
                        case "Double" => col.asInstanceOf[Double] >= compareFuncParam.get("thv").toDouble
                        case "Long" => col.asInstanceOf[Long] >= compareFuncParam.get("thv").toDouble
                        case "Float" => col.asInstanceOf[Float] >= compareFuncParam.get("thv").toDouble
                        case "String" => col.asInstanceOf[String] >= compareFuncParam.get("thv")
                        case _ => false
                    }
                    case "equalsTo" => compareFuncParam.get("colType") match {
                        case "Int" => col.asInstanceOf[Int] == compareFuncParam.get("thv").toDouble
                        case "Double" => col.asInstanceOf[Double] == compareFuncParam.get("thv").toDouble
                        case "Long" => col.asInstanceOf[Long] == compareFuncParam.get("thv").toDouble
                        case "Float" => col.asInstanceOf[Float] == compareFuncParam.get("thv").toDouble
                        case "String" => col.asInstanceOf[String] == compareFuncParam.get("thv")
                        case _ => false
                    }
                    // Fix: unknown/missing compareStyle no longer throws MatchError.
                    case _ => false
            }
        }
        spark.udf.register("customFunc", new CompareFunc, DataTypes.BooleanType)
        val passing = nonNullRows.filter(callUDF("customFunc", $"$colName"))
        val passingCount = passing.count()
        val result = if (meetRequirements) passing
                     else nonNullRows.filter(!callUDF("customFunc", $"$colName"))
        (result, nonNullRows.count(), passingCount)
    }
}
/**
 * Ad-hoc local smoke test for [[DataListGeneratedBySql]]; runs Spark in local
 * mode and exercises the completeness check against a small in-memory dataset.
 */
object DataListGeneratedBySqlTest extends DataListGeneratedBySql {
    def main(args: Array[String]): Unit={
        val spark = SparkSession.builder()
            .master("local")
            .appName("test")
            .config("spark.ui.enabled", "false")
            .getOrCreate()
        // Sample rows; the DataFrame schema is inferred from the Item case class
        // by createDataFrame(rdd). (The previous hand-built StructType local was
        // never passed anywhere and has been removed.)
        val rdd = spark.sparkContext.parallelize(Seq(
            Item(1, "thingy A", "110111199710245113", "20.2342", 0),
            Item(2, "thingy B", "140111199710245113", null, 0),
            Item(3, null, null, "low", 5),
            Item(4, "thingy D", "https://thingd.ca", "3.02234", 2),
            Item(5, "thingy E", "www.thingd.ca", "high", 5)))
        val data = spark.createDataFrame(rdd)
        data.show()

        // The other checks (staticNull, staticUniqueofOneColumn, staticMin/Max/
        // Mean/Sum, staticHasPattern, staticContainsEmail/URL, the pairwise
        // comparisons, staticHasUniqueness, ...) can be exercised the same way.
        val resultOfStaticHaveCompleteness1 = this.staticHaveCompleteness(true, spark, Array("description", "priority"), data)
        resultOfStaticHaveCompleteness1.show(100, false)
        val resultOfStaticHaveCompleteness2 = this.staticHaveCompleteness(false, spark, Array("description", "priority"), data)
        resultOfStaticHaveCompleteness2.show(100, false)

        spark.stop()
    }
}
