package com.o2o.cleaning.month.platform.ebusiness_plat.meituan

import com.o2o.utils.Iargs
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
  * @author o2o-rd-0008
  * @since  2020/6/5 16:23
  *
  * Year-over-year (2020/10 vs 2019/10) per-province Meituan report job.
  */
object MeituanDeal {

  /**
    * Year-over-year comparison job for Meituan shop data.
    *
    * Reads the current-period (2020/10, view "tab") and last-period (2019/10,
    * view "lasttab") ORC snapshots from OBS (S3A), aggregates row count,
    * distinct shop count, sell count and sales amount per province, computes
    * the YoY percentage change, and writes one single-file CSV per report:
    * all categories ("normal") and the food category firstCategoryId='10028'
    * ("meishi").
    */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder()
      .appName(s"${this.getClass.getSimpleName}")
      .config("spark.debug.maxToStringFields", "2000")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.caseSensitive", "true")
      .config("es.nodes", "192.168.1.29")
      .config("es.port", "9200")
      .config("cluster.name", "O2OElastic")
      // SECURITY NOTE(review): Elasticsearch credentials are hard-coded here;
      // they should come from configuration / secret management, not source.
      .config("es.net.http.auth.user", "elastic")
      .config("es.net.http.auth.pass", "changeme")
//      .master("local[*]")
      .getOrCreate()

    val sc = spark.sparkContext
    // OBS (S3-compatible) credentials are provided by the shared Iargs object.
    sc.hadoopConfiguration.set("fs.s3a.access.key", Iargs.OBSACCESS)
    sc.hadoopConfiguration.set("fs.s3a.secret.key", Iargs.OBSSECRET)
    sc.hadoopConfiguration.set("fs.s3a.endpoint", Iargs.OBSENDPOINT)
    sc.setLogLevel("WARN")

    // Current-period (2020/10) and last-period (2019/10) input data sets.
    val path = "s3a://o2o-dataproces-group/xuechunhua/product/meituan/2020/10/meituan_final_union_bu_result_data_new/"
    val lastpath = "s3a://dws-data/g_data/2019/10/meituan/"

    // createOrReplaceTempView replaces registerTempTable, deprecated since Spark 2.0
    // (and already used by updateyouy_province_2 below — keeps the file consistent).
//    updateyouy_province_2(spark, spark.read.orc(path)).createOrReplaceTempView("tab")
    spark.read.orc(path).createOrReplaceTempView("tab")
    spark.read.orc(lastpath).createOrReplaceTempView("lasttab")

    println("2020年按省")
    writeProvinceYoy(spark, "",
      "s3a://o2o-dataproces-group/zsc/product/meituan/2020/10/normal")

    println("2020年美食按省")
    writeProvinceYoy(spark, "where firstCategoryId='10028'",
      "s3a://o2o-dataproces-group/zsc/product/meituan/2020/10/meishi")

    sc.stop()
  }

  /**
    * Aggregates both periods per province, left-joins current onto last year,
    * computes the YoY change in percent, and writes a single header-ed CSV.
    *
    * The two reports previously duplicated ~35 lines of SQL differing only in
    * the filter; this helper factors that out.
    *
    * @param spark        active session with "tab" and "lasttab" views registered
    * @param filterClause optional SQL `where ...` clause applied to both periods
    *                     ("" for no filter)
    * @param outPath      output directory for the single CSV part file
    */
  private def writeProvinceYoy(spark: SparkSession, filterClause: String, outPath: String): Unit = {
    // Per-period aggregation over one source table, with the optional filter.
    def agg(table: String): String =
      s"""select
         |province,
         |count(1) num,
         |count(distinct shopId) shopnum,
         |sum(sellCount) sell,
         |sum(salesAmount) sale
         |from
         |$table
         |$filterClause
         |group by province""".stripMargin

    spark.sql(
      s"""select
         |t1.province,
         |t1.shopnum,
         |t1.num,
         |t1.sell,
         |(t1.sell/t2.sell-1)*100 sell_tb,
         |t1.sale,
         |(t1.sale/t2.sale-1)*100 sale_tb
         |from
         |(${agg("tab")}
         |) t1
         |left join
         |(${agg("lasttab")}
         |) t2
         |on t1.province=t2.province
       """.stripMargin)
      // repartition(1) so each report lands as a single CSV part file.
      .repartition(1)
      .write
      .option("header", "true")
      .csv(outPath)
  }

  /**
    * Applies per-province correction factors to `sellCount` and recomputes
    * `salesAmount` as the corrected sellCount times priceText.
    *
    * The original sellCount is renamed to sellCount_bak, scaled per province
    * (e.g. Beijing * 1.25, Shanghai * 1.50, unlisted provinces unchanged),
    * then sellCount_bak is dropped from the result.
    *
    * NOTE(review): assumes the input DataFrame has numeric-compatible
    * `priceText`, `sellCount`, `salesAmount` and a `province` column — not
    * verifiable from this file; confirm against the upstream schema.
    * Currently only referenced via a commented-out call in main.
    *
    * @param spark session used to run the correction SQL
    * @param data  raw per-shop DataFrame to correct
    * @return the corrected DataFrame
    */
  def updateyouy_province_2(spark: SparkSession, data: DataFrame): DataFrame = {
    data.withColumnRenamed("sellCount", "sellCount_bak").drop("salesAmount")
      .createOrReplaceTempView("updateyoy_province")
    spark.sql(
      """|select *,sellCount * priceText as salesAmount
         |from (
         |select *,
         |case
         |when province ='北京市' then round(sellCount_bak *1.25)
         |when province ='浙江省' then round(sellCount_bak * 1.33)
         |when province ='重庆市' then round(sellCount_bak * 1.18)
         |when province ='陕西省' then round(sellCount_bak * 1.21)
         |when province ='湖北省' then round(sellCount_bak * 1.38)
         |when province ='上海市' then round(sellCount_bak * 1.50)
         |when province ='江苏省' then round(sellCount_bak * 1.41)
         |when province ='新疆维吾尔自治区' then round(sellCount_bak * 0.86)
         |when province ='西藏自治区' then round(sellCount_bak * 0.9)
         |else sellCount_bak end sellCount
         |from updateyoy_province
         |)
         |""".stripMargin).drop("sellCount_bak")
  }
}
