package com.o2o.cleaning.month.platform.ebusiness_plat.meituan

import org.apache.spark.sql.{DataFrame, SparkSession}

/**
  * @author chenlixiu
  * @version
  * @since JDK 1.8
  */
object TuiSuanXiShu {

  /**
    * Entry point. Reads the Meituan monthly final result data from OBS (via the
    * s3a connector), rescales per-province sell counts with
    * [[updateyouy_province_2]], normalizes `sellCount` to bigint, and writes the
    * adjusted result back to OBS. The year-over-year comparison step at the end
    * is currently disabled (commented out) but its inputs/paths are kept so it
    * can be re-enabled unchanged.
    *
    * @param args optional; args(0) may override the month segment of the output
    *             paths (defaults to 10, matching the original hard-coded value)
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("ModifyEsData")
        // .master("local[*]")  // enable for local debugging
      .config("es.nodes", "192.168.1.29")
      .config("es.port", "9200")
      .config("cluster.name", "O2OElastic")
      .config("es.net.http.auth.user", "elastic")
      .config("es.net.http.auth.pass", "changeme")
      .getOrCreate()
    val sc = spark.sparkContext
    sc.setLogLevel("WARN")
    // SECURITY NOTE(review): object-storage credentials are hard-coded in source
    // control. Move them to cluster configuration / environment variables and
    // rotate the exposed key pair.
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")

    // Month segment used in every output path; overridable from the CLI,
    // default preserves the original behavior.
    val month = if (args.nonEmpty) args(0).toInt else 10

    val resultDataCurrentMonth_2 = spark.read.orc("s3a://o2o-dataproces-group/xuechunhua/product/meituan/2020/10/finalResultData_new/")

    val resultDataCurrentMonth_3: DataFrame = updateyouy_province_2(spark, resultDataCurrentMonth_2)

    // registerTempTable has been deprecated since Spark 2.0 (and removed later);
    // createOrReplaceTempView is the supported equivalent and is already used
    // elsewhere in this file.
    resultDataCurrentMonth_3.createOrReplaceTempView("value")
    // Convert sellCount to bigint (the source column may not be numeric).
    val sellResult_df = spark.sql("select *,CAST(sellCount as bigint) sellCounts from value")
      .drop("sellCount")
      .withColumnRenamed("sellCounts", "sellCount")
    sellResult_df.write.orc(s"s3a://o2o-dataproces-group/xuechunhua/product/meituan/2020/${month}/finalResultData_tuishu3/")

    // Inputs/outputs for the year-over-year share computation. The downstream
    // call is intentionally disabled; kept so it can be re-enabled as-is.
    val lastMonthData: DataFrame = spark.read.orc("s3a://dws-data/g_data/2019/10/meituan/")
    // Result path - province year-over-year share
    val provinceyoy = s"s3a://o2o-dataproces-group/xuechunhua/product/meituan/2020/${month}/province_tongbi_tuishu3"
    // Result path - four-economic-region year-over-year share
    val economic_division_yoyzhanbi = s"s3a://o2o-dataproces-group/xuechunhua/product/meituan/2020/${month}/economic_division_tongbi_tuishu3"
    // MeituanTuisuan.jisuanyoyandzhabi(spark, lastMonthData, sellResult_df, provinceyoy, economic_division_yoyzhanbi)

    sc.stop()

  }

  /**
    * Rescales `sellCount` by a fixed per-province correction coefficient and
    * recomputes `salesAmount = sellCount * priceText` from the adjusted count.
    * Provinces without an explicit coefficient keep their original count;
    * the original `salesAmount` column is dropped and replaced.
    *
    * NOTE(review): the coefficients (e.g. 0.25 for Beijing, 1.38 for Hubei) are
    * business tuning factors baked into the SQL — confirm with the data owner
    * before changing them.
    *
    * @param spark active session used to run the adjustment SQL
    * @param data  input frame; must contain `sellCount`, `priceText`, `province`
    * @return frame with adjusted `sellCount` and recomputed `salesAmount`
    */
  def updateyouy_province_2(spark: SparkSession, data: DataFrame): DataFrame = {
    data.withColumnRenamed("sellCount", "sellCount_bak").drop("salesAmount")
      .createOrReplaceTempView("updateyoy_province")
    spark.sql(
      """|select *,sellCount * priceText as salesAmount
         |from (
         |select *,
         |case
         |when province ='北京市' then round(sellCount_bak *0.25)
         |when province ='浙江省' then round(sellCount_bak * 0.33)
         |when province ='重庆市' then round(sellCount_bak * 0.18)
         |when province ='陕西省' then round(sellCount_bak * 0.21)
         |when province ='湖北省' then round(sellCount_bak * 1.38)
         |when province ='上海市' then round(sellCount_bak * 0.50)
         |when province ='江苏省' then round(sellCount_bak * 0.41)
         |else sellCount_bak end sellCount
         |from updateyoy_province
         |)
         |""".stripMargin).drop("sellCount_bak")
  }
}
