package com.o2o.cleaning.month.platform.ebusiness_plat.meituan

import com.alibaba.fastjson.{JSON, JSONObject}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}

/**
  * @author o2o-rd-0008
  * @since  2020/11/5 17:58
  * Description: unions the main Meituan monthly result set with the
  * supplemental ("bu") result set after province-level sellCount adjustment.
  */
object MeituanUnion {

  /**
    * Columns projected from BOTH inputs before the union. The two sides must
    * project the exact same columns in the exact same order, because
    * `DataFrame.union` aligns fields positionally, not by name. Keeping the
    * list in one place prevents the two copies from drifting apart.
    */
  private val outputColumns: Seq[String] = Seq(
    "address", "administrative_region", "aedzId", "categoryId", "categoryName",
    "city", "city_grade", "city_origin", "county", "district", "district_origin",
    "dpShopId", "economic_division", "emotionalKeywords", "evaluates",
    "firstCategoryId", "flavors", "food_type", "fourthCategoryId",
    "goodDescription", "goodRatePercentage", "good_id", "if_city", "if_district",
    "if_state_level_new_areas", "images", "is_brand", "is_premium", "latitude",
    "licencePics", "longitude", "mtWmPoiId", "mtWmPoiIdNew", "opening_hours",
    "order_lead_time", "original_cost", "packing_fee", "phone", "platformId",
    "platformName", "poor_counties", "praiseNum", "priceText", "promotion_info",
    "province", "regional_ID", "registration_institution", "rootCategoryId",
    "rootCategoryName", "rural_demonstration_counties", "rural_ecommerce",
    "secondCategoryId", "shopCommentCount", "shopDescription", "shopId",
    "shopImages", "shopImg", "shopName", "shopSellCount", "shopUrl", "shopUuid",
    "shop_open", "shop_open_day", "star", "street", "the_belt_and_road_city",
    "the_belt_and_road_province", "the_yangtze_river_economic_zone_city",
    "the_yangtze_river_economic_zone_province", "thirdCategoryId",
    "third_category_name", "timeStamp", "title", "town", "urban_agglomerations",
    "sellCount", "salesAmount")

  /**
    * Entry point. Reads the main Meituan ORC dataset and a supplemental
    * ("bu") ORC dataset from S3, normalizes `sellCount` in the main set to an
    * integral value, projects both to the shared column list, unions them and
    * writes the result back to S3 as ORC.
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
//      .master("local[*]")
      .appName("MeituanUnion")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .getOrCreate()

    val sc = spark.sparkContext
    // SECURITY NOTE(review): S3 credentials are hard-coded in source control.
    // These keys should be rotated and supplied via cluster configuration,
    // environment variables, or an instance profile instead of being committed.
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
//    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")
    sc.setLogLevel("WARN")

    // Main dataset for the month.
    val path = "s3a://o2o-dataproces-group/xuechunhua/product/meituan/2020/10/finalResultData_new/"
    // Supplemental ("bu" / 补) dataset gathered on 2020-11-05.
    val bupath = "s3a://o2o-dataproces-group/xuechunhua/product/meituan/2020/10/meituan_11_5_bu_result_data/"

    // Apply the province-level sellCount adjustment, then truncate the
    // (double-valued) adjusted sellCount to a long via a JSON round-trip.
    val mainJson: RDD[String] = updateyouy_province_2(spark, spark.read.orc(path)).toJSON.rdd.map(line => {
      val record: JSONObject = JSON.parseObject(line)
      // round(...) in the SQL yields a double; downstream expects an integer.
      record.put("sellCount", record.getDouble("sellCount").toLong)
      record.toString
    })

    // The supplemental set is round-tripped through fastjson unchanged so its
    // JSON field ordering/encoding matches the main set's before re-reading.
    val buJson: RDD[String] = spark.read.orc(bupath).toJSON.rdd.map(line =>
      JSON.parseObject(line).toString)

    val sourDF = spark.read.json(mainJson).selectExpr(outputColumns: _*)
    val buDF = spark.read.json(buJson).selectExpr(outputColumns: _*)

    val allDF: Dataset[Row] = sourDF.union(buDF)

    allDF.write.orc("s3a://o2o-dataproces-group/xuechunhua/product/meituan/2020/10/meituan_final_union_bu_result_data_new")
  }

  /**
    * Scales `sellCount` by a per-province year-over-year factor and derives
    * `salesAmount = sellCount * priceText` from the adjusted count.
    *
    * The original `sellCount` is renamed to `sellCount_bak` and the stale
    * `salesAmount` is dropped before recomputation; `sellCount_bak` is removed
    * again from the returned frame.
    *
    * NOTE(review): the original CASE listed 湖北省 and 陕西省 twice (1.38/0.93
    * and 1.21/0.93 respectively). SQL CASE is first-match-wins, so the second
    * branches (factor 0.93) were unreachable dead code; they have been removed
    * without changing behavior. Confirm with the data owner which factor was
    * actually intended for those two provinces.
    *
    * NOTE(review): `priceText` is multiplied as-is — it relies on Spark's
    * implicit string-to-number cast; non-numeric values become null.
    *
    * @param spark active session used to run the SQL
    * @param data  input frame containing at least `sellCount`, `salesAmount`,
    *              `priceText` and `province`
    * @return the input with adjusted `sellCount` and recomputed `salesAmount`
    */
  def updateyouy_province_2(spark: SparkSession, data: DataFrame): DataFrame = {
    data.withColumnRenamed("sellCount", "sellCount_bak").drop("salesAmount")
      .createOrReplaceTempView("updateyoy_province")
    spark.sql(
      """|select *,sellCount * priceText as salesAmount
         |from (
         |select *,
         |case
         |when province ='北京市' then round(sellCount_bak *1.20)
         |when province ='浙江省' then round(sellCount_bak * 1.33)
         |when province ='重庆市' then round(sellCount_bak * 1.18)
         |when province ='陕西省' then round(sellCount_bak * 1.21)
         |when province ='湖北省' then round(sellCount_bak * 1.38)
         |when province ='上海市' then round(sellCount_bak * 1.50)
         |when province ='江苏省' then round(sellCount_bak * 1.41)
         |when province ='新疆维吾尔自治区' then round(sellCount_bak * 0.86)
         |when province ='西藏自治区' then round(sellCount_bak * 0.9)
         |when province ='辽宁省' then round(sellCount_bak * 1.05)
         |when province ='天津市' then round(sellCount_bak * 1.12)
         |when province ='黑龙江省' then round(sellCount_bak * 1.32)
         |when province ='甘肃省' then round(sellCount_bak * 1.1)
         |else sellCount_bak end sellCount
         |from updateyoy_province
         |)
         |""".stripMargin).drop("sellCount_bak")
  }
}
