package com.o2o.cleaning.month.platform.ebusiness_plat.pupuchaoshi

import com.alibaba.fastjson.JSON
import com.o2o.utils.Iargs
import org.apache.spark.SparkContext
import org.apache.spark.sql.functions.lit
import org.apache.spark.sql.{DataFrame, SparkSession}

object PuPuChaoShi {
  /** Platform name; also the directory component used in source/result paths. */
  val readDatabase = "PupuChaoShi"
  val year = Iargs.YEAR
  val month = Iargs.MONTH
  // Fixed per-month timestamp tag stamped onto every output row.
  val timeStamp = Iargs.TIMESAMP
  val platformName = "PupuChaoShi"

  // Shop-address dimension table for this platform/month.
  val address_source_path = s"s3a://o2o-dimension-table/address_table/address_table_${year}/${month}/address_platform/PupuChaoShi_address_${year}_${month}/"

  // NOTE(review): not referenced anywhere in this file — presumably consumed by
  // the commented-out brand-join step; confirm before removing.
  var resultPath_dws = s"s3a://dws-data/split/split_data/${year}/${readDatabase}/${month}/"

  // Category dimension table (static snapshot from 2021-09).
  val category_path = s"s3a://o2o-dimension-table/category_table/pupuchaoshi/2021_9/"

  /**
    * Entry point: reads the month's raw ORC crawl data, computes per-product
    * sales count / amount, tags platform metadata, joins category and shop
    * address dimensions, and writes the result as ORC.
    */
  def main(args: Array[String]): Unit = {

    // Spark session (local mode).
    val spark = SparkSession.builder()
      .master("local[*]")
      .config("spark.debug.maxToStringFields", "10000")
      .appName("MongoSparkConnectorIntro")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .getOrCreate()

    // OBS (Huawei Cloud, S3A-compatible) access configuration.
    // SECURITY(review): credentials are hard-coded in source. Move them to
    // environment variables / a credentials provider and rotate these keys.
    val sc: SparkContext = spark.sparkContext
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")
    sc.setLogLevel("ERROR")

    // Raw crawl data for this platform/month.
    val sourcePath = s"s3a://o2o-sourcedata-${year}/obs-source-${year}/${month}/${readDatabase}/"
    println(sourcePath)
    // Output path for the cleaned product data.
    val resultPath = s"s3a://o2o-tempdata/zyf/${year}/${month}/${readDatabase}/"
    println(resultPath)

    // Clean/compute, then keep only rows with positive sales and price.
    val frame: DataFrame = pupuchaoshiCaculate(spark, sourcePath)
      .where("sellCount>0").where("priceText>0").drop("add_to_field")
    frame.createOrReplaceTempView("t2")
    println("销售量和销售额：")
    spark.sql(
      """
        |select count(1) count, cast(sum(sellCount) as bigInt) sellsum,cast(sum(salesAmount) as decimal(20,2)) salesum from t2
                  """.stripMargin
    ).show(false)

    // Tag platform metadata onto every row.
    val s_002 = frame
      .withColumn("platformName", lit("朴朴超市"))
      .withColumn("shopType", lit("B"))
      .withColumn("timeStamp", lit(s"${timeStamp}"))

    println("-----关联分类-----")
    val frame1 = pupuchaoshiCategory(spark, s_002, category_path)
    println("-----关联地址-----")
    val frame2 = pupuchaoshiAddress(spark, frame1, address_source_path)

    frame2.repartition(10).write.mode("overwrite").orc(resultPath)

    //      println("-----关联品牌-----")
    //      val brand = new brand_join_res
    //      brand.brandJoinResult(frame2, resultPath, year.toInt, month(i), platformName, spark)
  }

  /**
    * Product cleaning/computation.
    *
    * Each row carries an "add_to_field" array of daily {quantity, priceText}
    * snapshots. Monthly sales count is the sum of day-over-day quantity
    * increases; the unit price is taken from the last day and normalized;
    * salesAmount = price * sellCount, formatted to 2 decimals.
    *
    * @param spark      active session
    * @param sourcePath ORC source directory for the month
    * @return DataFrame with sellCount / priceText / salesAmount columns added
    *         and the raw add_to_field array removed
    */
  def pupuchaoshiCaculate(spark: SparkSession, sourcePath: String): DataFrame = {
    val pupuchaoshi = spark.read.orc(sourcePath).toJSON.rdd.map(line => {
      val nObject = JSON.parseObject(line)
      // Per-day snapshot array for this product.
      val array = nObject.getJSONArray("add_to_field")
      var sellCount = 0
      var lastQuantity = 0
      var priceText = "0"
      try {
        for (i <- 0 to array.size() - 1) {
          val nObject1 = array.getJSONObject(i)
          val quantity = nObject1.get("quantity").toString.toInt
          // Only count increases: a drop means a counter reset, not negative sales.
          if (i != 0 && (quantity > lastQuantity)) {
            sellCount += quantity - lastQuantity
          }
          lastQuantity = quantity

          // Price observed on the last day of the month.
          if (i + 1 == array.size()) {
            priceText = nObject1.get("priceText").toString
            nObject.put("priceText_collect", priceText)
            // Normalize the raw price text.
            // BUG FIX: the original used a mutually-exclusive else-if chain, so
            // a value containing both '-' and ',' (e.g. "1,234-5,678") was only
            // range-split, leaving "1,234", which made the later toDouble throw
            // OUTSIDE this try block and fail the task. Apply the cleanups
            // cumulatively instead.
            if (priceText.equals("-1")) priceText = "0" // -1 marks "no price"
            if (priceText.contains("\"\"")) priceText = priceText.split("\"\"")(0)
            if (priceText.contains("-")) priceText = priceText.split("-")(0) // price range: keep lower bound
            priceText = priceText.replace(",", "") // strip thousands separators
          }
        }
      } catch {
        case e: Exception =>
          println(e)
          println("nObject : --> " + nObject)
      }

      // Defensive parse: fall back to 0 rather than failing the whole task on a
      // malformed price that survived normalization.
      val unitPrice =
        try priceText.toDouble
        catch {
          case _: NumberFormatException =>
            priceText = "0"
            0.0
        }
      val salesAmount = f"${unitPrice * sellCount}%.2f"

      nObject.put("sellCount", sellCount)
      nObject.put("priceText", priceText)
      nObject.put("salesAmount", salesAmount)
      nObject.remove("add_to_field")

      nObject.toString
    })
    spark.read.json(pupuchaoshi)
  }

  /**
    * Category join: attaches firstCategoryId .. fourthCategoryId from the
    * category dimension table, keyed on (rootCategoryId, categoryId).
    * Rows with a null categoryId get the '100..99' placeholder ids.
    * Also prints the count of rows that failed to match the dimension table.
    *
    * @param spark         active session
    * @param frame         input rows
    * @param category_path CSV category dimension table (with header)
    * @return input rows with the four category-id columns appended
    */
  def pupuchaoshiCategory(spark: SparkSession, frame: DataFrame, category_path: String): DataFrame = {
    frame.createOrReplaceTempView("t_all")
    spark.read.option("header", true).csv(category_path).createOrReplaceTempView("t_category")
    val framCategory1 = spark.sql(
      """
        |select
        |t1.*,
        |case when t1.categoryId is null then '10099' else t2.firstCategoryId end firstCategoryId,
        |case when t1.categoryId is null then '1009999' else t2.secondCategoryId end secondCategoryId,
        |case when t1.categoryId is null then '100999999' else t2.thirdCategoryId end thirdCategoryId,
        |case when t1.categoryId is null then '10099999999' else t2.fourthCategoryId end fourthCategoryId
        |from
        |t_all t1
        |left join t_category t2
        |on t1.rootCategoryId = t2.rootCategoryId and t1.categoryId = t2.categoryId
        |""".stripMargin)

    println("没有关联上的 count ： ")

    spark.sql(
      """
        |select count(1) from t_all t1 left join t_category t2 on t1.rootCategoryId = t2.rootCategoryId and t1.categoryId = t2.categoryId where t2.rootCategoryId is null or t2.categoryId is null
        |""".stripMargin).show(false)

    framCategory1
  }

  /**
    * Address join: replaces the row's raw location columns with the shop
    * address dimension attributes, keyed on shopId. Every dimension column
    * defaults to '0' when the shop is not found in the dimension table.
    *
    * @param spark               active session
    * @param frame               input rows (raw location columns are dropped first)
    * @param address_source_path JSON shop-address dimension table
    * @return input rows with the address dimension columns appended
    */
  def pupuchaoshiAddress(spark: SparkSession, frame: DataFrame, address_source_path: String): DataFrame = {
    // Drop the crawl-side location fields so the dimension-table versions win.
    frame.drop("city")
      .drop("latitude")
      .drop("longitude")
      .drop("province")
      .drop("address")
      .createOrReplaceTempView("all")
    spark.read.json(address_source_path).createOrReplaceTempView("address")
    val frameAddress = spark.sql(
      """
        |select t1.*,
        |case when t2.shopId is not null then t2.address  else  '0' end address,
        |case when t2.shopId is not null then t2.administrative_region  else  '0' end administrative_region,
        |case when t2.shopId is not null then t2.aedzId  else  '0' end aedzId,
        |case when t2.shopId is not null then t2.city  else  '0' end city,
        |case when t2.shopId is not null then t2.city_grade  else  '0' end city_grade,
        |case when t2.shopId is not null then t2.city_origin  else  '0' end city_origin,
        |case when t2.shopId is not null then t2.district  else  '0' end district,
        |case when t2.shopId is not null then t2.district_origin  else  '0' end district_origin,
        |case when t2.shopId is not null then t2.economic_division  else  '0' end economic_division,
        |case when t2.shopId is not null then t2.if_city  else  '0' end if_city,
        |case when t2.shopId is not null then t2.if_district  else  '0' end if_district,
        |case when t2.shopId is not null then t2.if_state_level_new_areas  else  '0' end if_state_level_new_areas,
        |case when t2.shopId is not null then t2.latitude  else  '0' end latitude,
        |case when t2.shopId is not null then t2.longitude  else  '0' end longitude,
        |case when t2.shopId is not null then t2.name  else  '0' end name,
        |case when t2.shopId is not null then t2.poor_counties  else  '0' end poor_counties,
        |case when t2.shopId is not null then t2.province  else  '0' end province,
        |case when t2.shopId is not null then t2.regional_ID  else  '0' end regional_ID,
        |case when t2.shopId is not null then t2.registration_institution  else  '0' end registration_institution,
        |case when t2.shopId is not null then t2.rural_demonstration_counties  else  '0' end rural_demonstration_counties,
        |case when t2.shopId is not null then t2.rural_ecommerce  else  '0' end rural_ecommerce,
        |-- case when t2.shopId is not null then t2.shopId  else  '0' end shopId,
        |case when t2.shopId is not null then t2.the_belt_and_road_city  else  '0' end the_belt_and_road_city,
        |case when t2.shopId is not null then t2.the_belt_and_road_province  else  '0' end the_belt_and_road_province,
        |case when t2.shopId is not null then t2.the_yangtze_river_economic_zone_city  else  '0' end the_yangtze_river_economic_zone_city,
        |case when t2.shopId is not null then t2.the_yangtze_river_economic_zone_province  else  '0' end the_yangtze_river_economic_zone_province,
        |case when t2.shopId is not null then t2.town  else  '0' end town,
        |case when t2.shopId is not null then t2.urban_agglomerations  else  '0' end urban_agglomerations
        |from all t1 left join address t2
        |on t1.shopId = t2.shopId
        |""".stripMargin)
    frameAddress
  }
}
