package com.o2o.cleaning.month.platform.ebusiness_plat.meituan

import org.apache.spark.sql.SparkSession

/**
  * @ Author: o2o-rd-0008
  * @ Date:   2020/11/5 20:42
  * @ Description: Computes the month-over-month percentage change of Meituan
  *                sell count and sales amount between the current and previous
  *                monthly result datasets stored on OBS/S3A.
  */
object MeituanSample {

  /**
    * Entry point. Reads the current-month ("bu") and previous-month Meituan
    * ORC datasets from OBS (via the S3A connector), joins them on `good_id`,
    * and prints the month-over-month percentage change of total sell count
    * and total sales amount.
    *
    * @param args unused
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("MeituanUnion")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      // Both aggregate subqueries are joined on a constant key, which Spark
      // treats as a cross join; this flag allows it.
      .config("spark.sql.crossJoin.enabled", true)
      .getOrCreate()

    val sc = spark.sparkContext
    // SECURITY: credentials used to be hard-coded in source. They are now read
    // from the environment first; the literals remain only as a fallback for
    // backward compatibility and MUST be rotated and removed from the repo.
    sc.hadoopConfiguration.set("fs.s3a.access.key",
      sys.env.getOrElse("S3A_ACCESS_KEY", "GAO7EO9FWKPJ8WFCQDME"))
    sc.hadoopConfiguration.set("fs.s3a.secret.key",
      sys.env.getOrElse("S3A_SECRET_KEY", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL"))
    sc.hadoopConfiguration.set("fs.s3a.endpoint",
      sys.env.getOrElse("S3A_ENDPOINT", "https://obs.cn-north-1.myhuaweicloud.com"))
    sc.setLogLevel("WARN")

    // Current month (2020/10) re-run result data.
    val bupath = "s3a://o2o-dataproces-group/xuechunhua/product/meituan/2020/10/meituan_11_5_bu_result_data/"
    // Previous month (2020/9) final data, used as the comparison baseline.
    val lastpath = "s3a://dws-data/g_data/2020/9/meituan/"

    // registerTempTable is deprecated since Spark 2.0; use createOrReplaceTempView.
    spark.read.orc(bupath).createOrReplaceTempView("t1")
    spark.read.orc(lastpath).createOrReplaceTempView("t2")

    // a = current-month totals, b = previous-month totals, both restricted to
    // goods present in BOTH months (the original `left join ... where
    // t2.good_id is not null` is equivalent to an inner join).
    // selltb / saletb are the month-over-month changes in percent.
    spark.sql(
      """
        |select
        |(a.sell/b.sell-1)*100 selltb,
        |(a.sales/b.sales-1)*100 saletb
        |from
        |(select
        |'1' as joinkey,
        |sum(t1.sellCount) sell,
        |sum(t1.salesAmount) sales
        |from
        |t1
        |inner join
        |t2
        |on t1.good_id=t2.good_id) a
        |left join
        |(select
        |'1' as joinkey,
        |sum(t2.sellCount) sell,
        |sum(t2.salesAmount) sales
        |from
        |t1
        |inner join
        |t2
        |on t1.good_id=t2.good_id) b
        |on a.joinkey=b.joinkey
      """.stripMargin).show(false)

    // Release cluster resources before the JVM exits.
    spark.stop()
  }
}
