package a_aa_amainpackage.a_o2odata_deal.utils

import java.util
import java.util.Properties

import org.apache.spark.sql.functions.lit
import org.apache.spark.sql._

/**
 * Validates the final cleaned dataset and writes per-check reports to S3.
 */
object CheckData {

  // Single S3 prefix for every validation output, so the per-check paths
  // below cannot drift apart.
  private val CheckBasePath = "s3a://o2o-dataproces-group/liu_jianlin/checkTable"

  /**
   * Entry point: runs all validations over the cleaned data.
   *
   * @param spark active SparkSession
   * @param data  cleaned result DataFrame to validate
   */
  def checkData(spark: SparkSession, data: DataFrame): Unit = {
    categoryCheck(data, spark)
    addressCheck(data, spark)
    priceisZeroCheck(data, spark)
    fieldCheckout(data, spark)
  }

  /**
   * Category validation: aggregates goods count / sales amount / sell count per
   * category path, then for each (timeStamp, platformId) partition writes
   *  - the aggregation itself (used to watch the size of the 10099 first-level
   *    bucket), and
   *  - the rows whose category hierarchy is inconsistent (a deeper category id
   *    does not start with its parent's id).
   *
   * @param dataFrame cleaned data to validate
   * @param spark     active SparkSession
   */
  private def categoryCheck(dataFrame: DataFrame, spark: SparkSession): Unit = {

    dataFrame.createOrReplaceTempView("categoryCheckTable")
    // Aggregate once and cache: the view is queried twice per partition below.
    val categoryAgg = spark.sql(
      s"""
          select firstCategoryId,secondCategoryId,thirdCategoryId,fourthCategoryId,timeStamp,platformId,
          count(1) goodCount,sum(salesAmount) salesAmount,sum(sellCount) sellCount
           from categoryCheckTable
           group by firstCategoryId,secondCategoryId,thirdCategoryId,fourthCategoryId,timeStamp,platformId
      """.stripMargin).cache()
    categoryAgg.createOrReplaceTempView("categoryCheckSource")

    val partitions = spark.sql(
      """
        select timeStamp,platformId from categoryCheckSource group by timeStamp,platformId
      """.stripMargin).collectAsList()

    for (i <- 0 until partitions.size()) {
      val timeStamp = partitions.get(i).get(0)
      val platformId = partitions.get(i).get(1)

      // Per-category totals for this (timeStamp, platformId) partition.
      spark.sql(
        s"""
           |select firstCategoryId,secondCategoryId,thirdCategoryId,
           | fourthCategoryId,timeStamp,platformId,goodCount,salesAmount,sellCount
           | from categoryCheckSource where timeStamp = '$timeStamp' and platformId = '$platformId'
        """.stripMargin)
        .repartition(4)
        .write
        .mode(SaveMode.Overwrite)
        .orc(s"$CheckBasePath/categoryCheck/$timeStamp/$platformId")

      // Rows where level 1-4 category ids disagree with each other: each deeper
      // id must be prefixed by its parent's id (ids are 5/7/9/11 chars long,
      // and Spark SQL treats substr position 0 the same as 1).
      spark.sql(
        s"""
           |select firstCategoryId,secondCategoryId,thirdCategoryId,fourthCategoryId,timeStamp,platformId
           |from categoryCheckSource  where timeStamp = '$timeStamp' and platformId = '$platformId'
           |-- 二级和一级不统一
           |and ( substr(secondCategoryId,0,5) != firstCategoryId
           |   or substr(thirdCategoryId,0,5)  != firstCategoryId
           |   or substr(fourthCategoryId,0,5) != firstCategoryId
           |   or substr(thirdCategoryId,0,7)  != secondCategoryId
           |   or substr(fourthCategoryId,0,7) != secondCategoryId
           |   or substr(fourthCategoryId,0,9) != thirdCategoryId)
        """.stripMargin)
        .repartition(4)
        .write
        .mode(SaveMode.Overwrite)
        .orc(s"$CheckBasePath/categoryCheckError/$timeStamp/$platformId")
    }

    categoryAgg.unpersist()
  }

  /**
   * Address validation: aggregates goods count / sales amount / sell count per
   * province and writes one ORC output per (timeStamp, platformId) partition.
   *
   * @param dataFrame cleaned data to validate
   * @param spark     active SparkSession
   */
  private def addressCheck(dataFrame: DataFrame, spark: SparkSession): Unit = {
    dataFrame.createOrReplaceTempView("addressCheckTable")

    // Cached because the loop below filters it once per partition.
    val provinceAgg = spark.sql(
      s"""
select province,timeStamp,platformId
,count(1) goodCount,sum(salesAmount) salesAmount,sum(sellCount) sellCount
 from addressCheckTable
 group by province,timeStamp,platformId
      """.stripMargin).cache()

    provinceAgg.createOrReplaceTempView("addressCheckTableSource")

    val partitions = spark.sql(
      """
select timeStamp,platformId from addressCheckTableSource group by timeStamp,platformId
      """.stripMargin).collectAsList()

    for (i <- 0 until partitions.size()) {
      val timeStamp = partitions.get(i).get(0)
      val platformId = partitions.get(i).get(1)

      spark.sql(
        s"""
           |select *
           | from addressCheckTableSource where timeStamp = '$timeStamp' and platformId = '$platformId'
        """.stripMargin)
        .repartition(4)
        .write
        .mode(SaveMode.Overwrite)
        .orc(s"$CheckBasePath/addressCheck/$timeStamp/$platformId")
    }

    // BUG FIX: the cache was previously never released.
    provinceAgg.unpersist()
  }

  /**
   * Detects rows whose price, sell count or sales amount is <= 0 and writes a
   * sample (up to 100 rows) of them as JSON.
   *
   * BUG FIX: the timeStamp / platformName locals were previously swapped
   * (get(0) is the timeStamp column of the select), which reversed the two
   * path segments of the output relative to every other check.
   *
   * @param data  data source to validate
   * @param spark active SparkSession
   */
  private def priceisZeroCheck(data: DataFrame, spark: SparkSession): Unit = {

    // Grab the partition identifiers from an arbitrary row.
    val rows = data.select("timeStamp", "platformName").limit(1)
      .collectAsList()
    if (rows.isEmpty) return // empty input: nothing to check, nothing to write

    val timeStamp = rows.get(0).get(0)
    val platformName = rows.get(0).get(1)

    val zeroData: DataFrame = data.where("priceText <= 0.00 or sellCount <= 0 or salesAmount <= 0 ")
    val badCount: Long = zeroData.count()

    if (badCount > 0) {
      zeroData.select("good_id", "priceText", "sellCount", "salesAmount", "platformName", "timeStamp")
        .withColumn("errorInfor", lit("价格、销量、销售额小于0的条数为：" + badCount))
        .limit(100)
        .write
        .mode(SaveMode.Overwrite)
        .json(s"$CheckBasePath/priceisZeroCheck/$timeStamp/$platformName")
    }
  }

  /**
   * Schema validation: verifies every required column exists and that the key
   * columns contain no nulls, then writes a one-row error report as ORC.
   *
   * Fixes vs the previous version:
   *  - createDataFrame(Seq(tuple)) names its columns _1/_2/_3, so the old
   *    withColumnRenamed("_c0", ...) calls were silent no-ops and the report
   *    kept the tuple names; toDF(...) actually applies the intended names.
   *  - null probes now run only for columns that exist — a missing column
   *    (which this method itself reports!) previously made data.where(...)
   *    throw AnalysisException before any report was written.
   *  - the fourthCategoryId null message wrongly claimed thirdCategoryId.
   *
   * @param data  cleaned result DataFrame
   * @param spark active SparkSession
   */
  private def fieldCheckout(data: DataFrame, spark: SparkSession): Unit = {

    // Grab the partition identifiers from an arbitrary row.
    val rows = data.select("timeStamp", "platformName").limit(1)
      .collectAsList()
    if (rows.isEmpty) return // empty input: nothing to report

    val fields = data.schema.fieldNames.toSet

    // Required columns with the exact message emitted when one is missing
    // (messages kept byte-identical to the historical output).
    val requiredFields = Seq(
      "province"         -> "!!!!!!!!!!!!不包含province字段",
      "city"             -> "!!!!!!!!!!!!不包含city字段",
      "district"         -> "!!!!!!!!!!!!不包含district字段",
      "timeStamp"        -> "!!!!!!!!!!!!不包含timeStamp字段",
      "platformId"       -> "!!!!!!!!!!!!不包含platformId字段",
      "brandName_cn"     -> "!!!!!!!!!!!!不包含brandName_cn字段",
      "firstCategoryId"  -> "!!!!!!!!!!!!不包含firstCategoryid字段",
      "secondCategoryId" -> "!!!!!!!!!!!!不包含secondCategoryId字段",
      "thirdCategoryId"  -> "!!!!!!!!!!!!不包含thirdCategoryId字段",
      "fourthCategoryId" -> "!!!!!!!!!!!!不包含fourthCategoryId字段",
      "sellCount"        -> "!!!!!!!!!!!!不包含sellCount字段",
      "salesAmount"      -> "!!!!!!!!!!!!不包含salesAmount字段"
    )

    // Key columns probed for nulls, with the exact historical message.
    val nullChecks = Seq(
      "timeStamp"        -> "!!!!!!!!!!!!检测出有时间戳值为null======",
      "platformId"       -> "!!!!!!!!!!!检测出有platformId值为null======",
      "firstCategoryId"  -> "!!!!!!!!!!!检测出有firstCategoryid值为null======",
      "secondCategoryId" -> "!!!!!!!!!!!检测出有secondCategoryId值为null======",
      "thirdCategoryId"  -> "!!!!!!!!!!!检测出有thirdCategoryId值为null======",
      "fourthCategoryId" -> "!!!!!!!!!!!检测出有fourthCategoryId值为null======",
      "sellCount"        -> "!!!!!!!!!!检测出有sellCount值为null======",
      "salesAmount"      -> "!!!!!!!!!!检测出有salesAmount值为null======"
    )

    val missingErrors = requiredFields.collect {
      case (field, message) if !fields.contains(field) => message + "\n"
    }

    // Each count() is a full Spark job, so only probe columns that exist.
    val nullErrors = nullChecks.collect {
      case (field, message) if fields.contains(field) && data.where(s"$field is null").count() > 0 =>
        message + "\n"
    }

    // Missing-field messages first, null messages second — same order as before.
    val err = (missingErrors ++ nullErrors).mkString

    val timeStamp = rows.get(0).get(0).toString
    val platformName = rows.get(0).get(1).toString

    spark.sqlContext.createDataFrame(Seq((timeStamp, platformName, err)))
      .toDF("toObsTime", "platformName", "errorInfomation")
      .repartition(1)
      .write
      .mode(SaveMode.Overwrite)
      .orc(s"$CheckBasePath/fieldCheckout/$timeStamp/$platformName")
  }
}
