package com.o2o.cleaning.month.platform.ebusiness_plat.rongegou

import com.alibaba.fastjson.JSON
import com.mongodb.spark.MongoSpark
import com.o2o.cleaning.month.platform.ebusiness_plat.brand_modular.brand_join_res
import com.o2o.utils.times.TimesYearAll
import org.apache.commons.codec.digest.DigestUtils
import org.apache.spark.SparkContext
import org.apache.spark.sql.functions.lit
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
  * @author: gaoyadi
  * @Date: 2018/6/21 10:20
  * @Description:执行时需要修改的变量 month timeStamp
  * @Modify By:
  */
object RongYiGou {

  // ---- Per-run configuration -------------------------------------------------
  // Per the file header, month / timeStamp / collection (and the derived paths)
  // must be edited before each monthly run.
  // NOTE(review): these are mutable `var`s but are never reassigned in this
  // file; `val` would be safer.

  var year = 2021
  //platform name (used in output paths)
  var platform = "rongyigou"
  //month being processed
  var month = "12"
  //fixed timestamp for the month
  var timeStamp = TimesYearAll.TIME202112
  //MongoDB database name
  var database = "Icbc"
  //MongoDB collection name (earlier runs used e.g. rongyigou_1909)
  var collection = "rongyigou_2112" //rongyigou_1909

  //monthly address dimension table path
  var address = s"s3a://o2o-dimension-table/address_table/address_table_2021/${month}/address_platform/icbc_address_2021_${month}/*"

  //third-level category id mapping path
  var subPath = "s3a://o2o-dimension-table/category_table/cate/cate0401/rongyigou/subCategoryId/*"
  //raw data path (written by mongoExport, read by rongyigouCaculate)
  var sourcePath = s"s3a://o2o-sourcedata-2021/obs-source-2021/2021/${month}/${platform}/${collection}"
  //cleaned goods result path
  var resultPath = s"s3a://o2o-dataproces-group/zyf/2021/${month}/${platform}/good"
  var resultFinalPath = s"s3a://o2o-dataproces-group/zyf/2021/${month}/${platform}/good_final"
  //extracted shop output path
  var shopPath = s"s3a://o2o-dataproces-group/zyf/2021/${month}/${platform}/shop"
  //verification path (unused in this file)
  var strPath = "s3a://o2o-dataproces-group/zyf/"

  def main(args: Array[String]): Unit = {

    // Spark session + MongoDB connector input configuration.
    // SECURITY NOTE(review): database credentials are hard-coded in source;
    // move them to external configuration / secrets management.
    // NOTE(review): the input URI contains a space after '@'
    // ("...!@ 192.168.0.149...") which looks like a typo — confirm the
    // connection string actually works.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("MongoSparkConnectorIntro")
      .config("spark.mongodb.input.uri", "mongodb://root:O2Odata123!@ 192.168.0.149:27017/admin")
      .config("spark.mongodb.input.database", s"${database}")
      .config("spark.mongodb.input.collection", s"${collection}")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .getOrCreate()

    // Huawei OBS (S3A-compatible) access configuration.
    // SECURITY NOTE(review): access/secret keys are hard-coded; rotate and
    // externalize them.
    val sc: SparkContext = spark.sparkContext
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")
    sc.setLogLevel("ERROR")

    //export the raw collection from Mongo and land it as ORC at sourcePath
    mongoExport(spark, sc, month, platform, database, collection)
    //clean + compute sales; drop the raw per-day snapshot column
    val frame2 = rongyigouCaculate(spark, sourcePath).drop("add_to_field")

    println("原始的数据：" + frame2.count())

    //tag constant platform columns; keep only rows with positive sales and a
    //positive numeric price (non-numeric priceText presumably nulls out of the
    //"priceText*1.0 > 0" filter — TODO confirm)
    var s_002 = frame2
      .withColumn("timeStamp", lit(s"${timeStamp}")).withColumn("platformName", lit("融e购"))
      .withColumn("platformId", lit("26")).withColumn("shopType", lit("B"))
      .where("sellCount>0").where("priceText*1.0 >0")

    // NOTE(review): registerTempTable is deprecated since Spark 2.0;
    // createOrReplaceTempView is the modern equivalent (here and below).
    s_002.registerTempTable("t1")
    println("销售量和销售额：")
    spark.sql(
      """
        |select count(1) count, cast(sum(sellCount) as bigInt) sell,cast(sum(salesAmount) as decimal(20,2)) sale from t1
      """.stripMargin
    ).show()

    //attach the four-level category hierarchy
    val frame = rongyigouCate(spark, s_002)
    //attach shop address/region attributes
    val frame1 = addressRongyigou(spark, frame, address)

    println("省份为0的数据：" + frame1.where("province = '0'").count())

    //persist cleaned goods as a single ORC file
    frame1.repartition(1).write.orc(resultPath)

    val resDF: DataFrame = spark.read.orc(resultPath)

    //join the old brand dimension table and write the final result
    val brand = new brand_join_res
    brand.brandJoinResult(resDF, resultFinalPath, year, month.toInt, platform, spark)

    spark.stop()

  }


  /** *
    * Pulls the configured collection from MongoDB, coerces every field to its
    * string form, strips the Mongo `_id`, and lands the documents as ORC at
    * `sourcePath`.
    *
    * @param spark      session used to re-read the JSON strings and write ORC
    * @param sc         context the Mongo connector loads through
    * @param month      unused here; kept for call-site compatibility
    * @param platform   unused here; kept for call-site compatibility
    * @param database   unused here; the connector reads it from the session config
    * @param collection unused here; the connector reads it from the session config
    */
  def mongoExport(spark: SparkSession, sc: SparkContext, month: String, platform: String, database: String, collection: String): Unit = {

    val mongoRdd = MongoSpark.load(sc)

    val jsonStrings = mongoRdd.map { document =>
      val doc = JSON.parseObject(document.toJson())

      // Normalize: every value becomes a string so the inferred schema is uniform.
      try {
        val keyIter = doc.keySet().iterator()
        while (keyIter.hasNext) {
          val fieldName = keyIter.next()
          doc.put(fieldName, doc.get(fieldName).toString)
        }
      } catch {
        case _: Exception =>
          // Best-effort: log the offending document and keep going.
          println("===============" + doc)
      }

      doc.remove("_id")
      doc.toString
    }.cache()

    spark.read.json(jsonStrings).repartition(1).write.orc(sourcePath)
  }


  /** *
    * Cleans the raw goods data and computes per-item monthly sales.
    *
    * For each product JSON document:
    *  - backfills a missing brandValueId with the MD5 of the upper-cased brand name
    *  - extracts the item number ("商品货号") from the nested Base_Info JSON,
    *    discarding junk values containing '/' or '无'
    *  - sums the positive day-over-day deltas of the cumulative `sellCount`
    *    snapshots in `add_to_field`
    *  - normalizes the last snapshot's `priceText` (ranges, thousands
    *    separators, stray quotes) and computes salesAmount = price * sellCount
    *
    * @param spark active session
    * @param sourthPath ORC path of the raw exported data
    * @return DataFrame of all goods with salesAmount attached
    */
  def rongyigouCaculate(spark: SparkSession, sourthPath: String): DataFrame = {

    val cleaned = spark.read.orc(sourthPath).toJSON.rdd.map(line => {
      val nObject = JSON.parseObject(line)

      // Backfill a missing brand id from the brand name.
      val brandValueId = nObject.getOrDefault("brandValueId", "-1").toString
      val brandName = nObject.getOrDefault("brandName", "-1").toString
      if (brandValueId.equals("-1") && !brandName.equals("-1")) {
        nObject.put("brandValueId", DigestUtils.md5Hex(brandName.toUpperCase()))
      }

      // Extract the item number from the nested Base_Info JSON.
      val base = nObject.getOrDefault("Base_Info", "-1").toString
      var originItem = "-1"
      if (!base.equals("-1")) {
        val baseinfo = JSON.parseObject(base)
        // BUG FIX: the original declared a second, shadowing `var originItem`
        // here, so the outer variable always stayed "-1" and the extracted item
        // number was silently discarded. Assign the outer variable instead.
        originItem = baseinfo.getOrDefault("商品货号", "-1").toString
        if (originItem.contains("/") || originItem.contains("无")) {
          originItem = "-1"
        }
      }

      // Monthly sales = sum of positive day-over-day increases of the
      // cumulative sellCount; price = the last snapshot's priceText.
      val array = nObject.getJSONArray("add_to_field")
      var sellCount = 0
      var lastQuantity = 0
      var priceText = "0"

      try {
        for (i <- 0 until array.size()) {
          val snapshot = array.getJSONObject(i)
          val dayCount = snapshot.get("sellCount").toString.toInt
          if (i != 0 && dayCount > lastQuantity) {
            sellCount += dayCount - lastQuantity
          }
          lastQuantity = dayCount

          // Last day's price: record the raw value, then normalize.
          if (i + 1 == array.size()) {
            priceText = snapshot.get("priceText").toString
            nObject.put("priceText_collect", priceText)
            if (priceText.equals("-1")) {
              priceText = "0"
            } else if (priceText.contains("-")) {
              // price range "a-b": keep the lower bound
              priceText = priceText.split("-")(0)
            } else if (priceText.contains(",")) {
              priceText = priceText.replace(",", "")
            } else if (priceText.contains("\"\"")) {
              priceText = priceText.split("\"\"")(0)
            }
          }
        }
      } catch {
        case e: Exception =>
          println(e)
          println("nObject : --> " + nObject)
      }

      // A range's lower bound may still carry thousands separators.
      if (priceText.contains(",")) {
        priceText = priceText.replace(",", "")
      }

      var original_cost = nObject.getOrDefault("original_cost", "0").toString
      if (original_cost.contains("-")) {
        original_cost = original_cost.split("-")(0)
      }

      // NOTE(review): priceText.toDouble throws outside the try/catch if any
      // non-numeric residue survives normalization — TODO confirm inputs.
      val salesAmount = (priceText.toDouble * sellCount).formatted("%.2f")

      nObject.put("sellCount", sellCount)
      nObject.put("Base_Info", base)
      nObject.put("originItem", originItem)
      nObject.put("original_cost", original_cost)
      nObject.put("priceText", priceText)
      nObject.put("salesAmount", salesAmount)
      nObject.remove("add_to_field")
      nObject.toString
    })
    spark.read.json(cleaned)
  }

  /** *
    * Attaches the four-level category hierarchy to every product row.
    *
    * Ten specific subCategoryIds are mapped with hard-coded rules (including
    * title-keyword refinement at the third level); all remaining ids are mapped
    * through the `cate` dimension table, with '10099'-family placeholders for
    * misses. Placeholder ids are then backfilled level by level by appending
    * '99' to the parent level's id.
    *
    * @param spark
    * @param frame product rows carrying subCategoryId and title columns
    * @return all products with first/second/third/fourthCategoryId attached
    */
  def rongyigouCate(spark: SparkSession, frame: DataFrame): DataFrame = {

    // NOTE(review): registerTempTable is deprecated since Spark 2.0;
    // createOrReplaceTempView is the modern equivalent (here and below).
    frame.registerTempTable("JAN_101")
    spark.read.json(subPath).dropDuplicates("subCategoryId").registerTempTable("cate")
    // Hand-mapped subset: the ten listed subCategoryIds get hard-coded category
    // ids, refined by title keywords at the third level.
    var frame2 = spark.sql(
      """
        |select
        |*,
        |case
        |when subCategoryId= '47006001' then '10021'
        |when subCategoryId= '43007011' then '10022'
        |when subCategoryId= '43020007' then '10022'
        |when subCategoryId= '30004002' then '10015'
        |when subCategoryId= '47004005' then '10021'
        |when subCategoryId= '47012002' then '10021'
        |when subCategoryId= '46001010' then '10014'
        |when subCategoryId= '46001009' then '10014'
        |when subCategoryId= '43025002' then '10022'
        |when subCategoryId= '47005001' then '10021'
        |
        |else '10099'
        |end firstCategoryId,
        |
        |case
        |when subCategoryId= '47006001' then '1002107'
        |when subCategoryId= '43007011' then '1002213'
        |when subCategoryId= '43020007' then '1002205'
        |when subCategoryId= '30004002' then '1001503'
        |when subCategoryId= '47004005' then '1002101'
        |when subCategoryId= '47012002' then '1002101'
        |when subCategoryId= '46001010' then '1001406'
        |when subCategoryId= '46001009' then '1001406'
        |when subCategoryId= '43025002' then '1002202'
        |when subCategoryId= '47005001' then '1002102'
        |
        |else '1009999'
        |end secondCategoryId,
        |
        |
        |
        |case
        |when subCategoryId= '47006001' then (case when title rlike '白酒' then '100210701' else '100210705' end)
        |when subCategoryId= '43007011' then (case when title rlike '浴霸' then '100221304' else '100221302' end)
        |when subCategoryId= '43020007' then (case when title rlike '蚊帐' then '100220505' else '100220506' end)
        |when subCategoryId= '30004002' then (case when title rlike '单反' then '100150301' else '100150302' end)
        |when subCategoryId= '47004005' then (case when title rlike '巧克力' and title not rlike '糖果' then '100210101' else '100210103' end)
        |when subCategoryId= '47012002' then (case when title rlike '巧克力' and title not rlike '糖果' then '100210101' else '100210103' end)
        |when subCategoryId= '46001010' then (case when title rlike '洗碗机'  then '100140605' else '100140604' end)
        |when subCategoryId= '46001009' then (case when title rlike '燃气'  then '100140602' else '100140601' end)
        |when subCategoryId= '43025002' then (case when title rlike '洗衣液'  then '100220201' else '100220202' end)
        |when subCategoryId= '47005001' then (case when title rlike '米'  then '100210203' when title rlike '面' then '100210204' else '100210205' end)
        |
        |else '100999999'
        |end thirdCategoryId,
        |
        |case
        |when subCategoryId= '47006001' then '10099999999'
        |when subCategoryId= '43007011' then '10099999999'
        |when subCategoryId= '43020007' then '10099999999'
        |when subCategoryId= '30004002' then '10099999999'
        |when subCategoryId= '47004005' then '10099999999'
        |when subCategoryId= '47012002' then '10099999999'
        |when subCategoryId= '46001010' then '10099999999'
        |when subCategoryId= '46001009' then '10099999999'
        |when subCategoryId= '43025002' then '10099999999'
        |when subCategoryId= '47005001' then '10099999999'
        |
        |else '10099999999'
        |end fourthCategoryId
        |
        |
        |from
        |JAN_101
        |
        |where
        |subCategoryId= '47006001'
        |or subCategoryId= '43007011'
        |or subCategoryId= '43020007'
        |or subCategoryId= '30004002'
        |or subCategoryId= '47004005'
        |or subCategoryId= '47012002'
        |or subCategoryId= '46001010'
        |or subCategoryId= '46001009'
        |or subCategoryId= '43025002'
        |or subCategoryId= '47005001'
      """.stripMargin
    )

    // Everything else: look the ids up in the cate dimension table; rows that
    // miss the join fall back to the '10099' placeholder family.
    var frame1 = spark.sql(
      """
        |select
        |a.*,
        |IFNULL(b.firstCategoryId,'10099') firstCategoryId,
        |IFNULL(b.secondCategoryId,'1009999') secondCategoryId,
        |IFNULL(b.thirdCategoryId,'100999999') thirdCategoryId,
        |IFNULL(b.fourthCategoryId,'10099999999') fourthCategoryId
        |from
        |JAN_101 a
        |left join
        |cate b
        |on
        |a.subCategoryId = b.subCategoryId
        |where
        |a.subCategoryId != '47006001'
        |and a.subCategoryId != '43007011'
        |and a.subCategoryId != '43020007'
        |and a.subCategoryId != '30004002'
        |and a.subCategoryId != '47004005'
        |and a.subCategoryId != '47012002'
        |and a.subCategoryId != '46001010'
        |and a.subCategoryId != '46001009'
        |and a.subCategoryId != '43025002'
        |and a.subCategoryId != '47005001'
      """.stripMargin)

    /**
      *
      * Merge the two halves, then resolve the '10099' placeholder ids:
      * each placeholder level is rebuilt as parent-level id + '99'.
      */
    var t01 = frame1.union(frame2)

    t01.registerTempTable("f1")

    // Level-by-level backfill: second from first, third from second, fourth
    // from third. The *1-suffixed columns temporarily coexist with the
    // originals, which are dropped and renamed at the end.
    spark.sql(
      """
        |select
        | *,
        | firstCategoryId as firstCategoryId1,
        | case when secondCategoryId = '1009999' then concat(firstCategoryId,'99') else secondCategoryId end secondCategoryId1
        | from
        | f1
      """.stripMargin)
      .registerTempTable("f2")
    spark.sql(
      """
        |select
        | *,
        | case when thirdCategoryId = '100999999' then concat(secondCategoryId1,'99') else  thirdCategoryId end thirdCategoryId1
        | from
        | f2
      """.stripMargin)
      .registerTempTable("f3")
    spark.sql(
      """
        |select
        | *,
        | case when fourthCategoryId = '10099999999' then concat(thirdCategoryId1,'99') else  fourthCategoryId end fourthCategoryId1
        | from
        | f3
      """.stripMargin)
      .registerTempTable("f4")
    var t0_1 = spark.sql(
      """
        |select
        |*
        |from
        |f4
      """.stripMargin).drop("firstCategoryId", "secondCategoryId", "thirdCategoryId", "fourthCategoryId")
      .withColumnRenamed("firstCategoryId1", "firstCategoryId")
      .withColumnRenamed("secondCategoryId1", "secondCategoryId")
      .withColumnRenamed("thirdCategoryId1", "thirdCategoryId")
      .withColumnRenamed("fourthCategoryId1", "fourthCategoryId")

    t0_1
  }

  /** *
    * Joins shop address/region attributes onto the product rows by shopId.
    *
    * @param spark
    * @param frame       product rows carrying a shopId column
    * @param addressPath monthly address dimension path (JSON)
    * @return products with address/region columns attached; rows whose shop has
    *         no address record get '0' / '-1' / default-name placeholders
    */
  def addressRongyigou(spark: SparkSession, frame: DataFrame, addressPath: String): DataFrame = {
    // NOTE(review): "all" is risky as a view name (reserved-word-like in some
    // SQL dialects) — works here but worth renaming.
    frame.registerTempTable("all")
    val add = spark.read.json(addressPath)
    add.registerTempTable("address")
    //rows that fail to join get placeholder values ('0', '-1', or the bank's
    //default shop name "中国工商银行")
    var address = spark.sql(
      """
        |select t1.*,
        |case when t2.shopId is not null then t2.administrative_region  else  '0' end administrative_region,
        |case when t2.shopId is not null then t2.city  else  '0' end  city,
        |case when t2.shopId is not null then t2.city_grade  else  '0' end  city_grade,
        |case when t2.shopId is not null then t2.city_origin  else  '0' end  city_origin,
        |case when t2.shopId is not null then t2.district  else  '0' end district,
        |case when t2.shopId is not null then t2.district_origin  else  '0' end  district_origin,
        |case when t2.shopId is not null then t2.economic_division  else  '0' end  economic_division,
        |case when t2.shopId is not null then t2.if_city  else '0' end  if_city,
        |case when t2.shopId is not null then t2.if_district  else  '0' end  if_district,
        |case when t2.shopId is not null then t2.if_state_level_new_areas  else  '0' end  if_state_level_new_areas,
        |case when t2.shopId is not null then t2.poor_counties  else  '0'  end  poor_counties,
        |case when t2.shopId is not null then t2.province  else  '0' end  province,
        |case when t2.shopId is not null then t2.regional_ID  else  '0' end  regional_ID,
        |case when t2.shopId is not null then t2.rural_demonstration_counties  else  '0' end rural_demonstration_counties,
        |case when t2.shopId is not null then t2.rural_ecommerce  else  '0' end  rural_ecommerce,
        |case when t2.shopId is not null then t2.the_belt_and_road_city  else  '0' end  the_belt_and_road_city,
        |case when t2.shopId is not null then t2.the_belt_and_road_province  else  '0' end  the_belt_and_road_province,
        |case when t2.shopId is not null then t2.the_yangtze_river_economic_zone_city  else  '0' end  the_yangtze_river_economic_zone_city,
        |case when t2.shopId is not null then t2.the_yangtze_river_economic_zone_province  else  '0' end  the_yangtze_river_economic_zone_province,
        |case when t2.shopId is not null then t2.urban_agglomerations  else  '0' end  urban_agglomerations,
        |case when t2.shopId is not null then t2.name  else  '中国工商银行' end  name,
        |case when t2.shopId is not null then t2.address  else  '0' end  address,
        |case when t2.shopId is not null then t2.aedzId  else  '-1' end  aedzId,
        |case when t2.shopId is not null then t2.latitude  else  '-1' end latitude,
        |case when t2.shopId is not null then t2.longitude  else  '-1' end longitude,
        |case when t2.shopId is not null then t2.town  else  '-1' end town,
        |case when t2.shopId is not null then t2.registration_institution  else  '-1' end registration_institution
        | from all t1 left join address t2
        |on t1.shopId = t2.shopId
      """.stripMargin
    )
    address

  }


  /** *
    * Extracts per-shop aggregates (total sell count / sales amount) joined with
    * each shop's attribute columns, and writes one JSON file to shopPath.
    *
    * NOTE(review): this method is never invoked from main() in this file —
    * confirm whether it is called elsewhere or run manually.
    *
    * @param spark
    * @param frame    cleaned product rows (must carry shopId, sellCount,
    *                 salesAmount and the address/tag columns selected below)
    * @param shopPath output path for the extracted shops
    */
  def shopTiQu(spark: SparkSession, frame: DataFrame, shopPath: String): Unit = {
    frame.registerTempTable("gao_0021")
    // Per-shop totals.
    val gao_0022 = spark.sqlContext.sql(
      """
        |select shopId,sum(sellCount) as totalSellCount,cast(sum(salesAmount)as decimal(20,2)) as totalSalesAmount from gao_0021 group by shopId
      """.stripMargin)
    println("店铺个数：" + gao_0022.count())
    gao_0022.registerTempTable("gao_0022")
    // Re-join the totals to the product rows to pick up shop-level attributes,
    // then keep one row per shop.
    // NOTE(review): dropDuplicates after the join keeps an arbitrary matching
    // product row's attributes for each shop — fine only if those columns are
    // constant per shopId; confirm.
    spark.sqlContext.sql(
      """
        |
        |select
        |a.shopId,
        |shopUrl,
        |shopType,
        |timeStamp,
        |platformId,
        |platformName,
        |evaluates,
        |administrative_region,
        |city,
        |city_grade,
        |city_origin,
        |district,
        |district_origin,
        |economic_division,
        |if_city,
        |if_district,
        |if_state_level_new_areas,
        |poor_counties,
        |province,
        |regional_ID,
        |rural_demonstration_counties,
        |rural_ecommerce,
        |the_belt_and_road_city,
        |the_belt_and_road_province,
        |the_yangtze_river_economic_zone_city,
        |the_yangtze_river_economic_zone_province,
        |urban_agglomerations,
        |registration_institution,
        |address,
        |latitude,
        |aedzId,
        |town,
        |longitude,
        |a.totalSellCount,
        |a.totalSalesAmount
        |from
        |gao_0022 a
        |left join
        |gao_0021 b
        |on a.shopId=b.shopId
      """.stripMargin)
      .dropDuplicates("shopId").repartition(1).write.json(shopPath)
  }
}

