package com.o2o.cleaning.month.platform.ebusiness_plat.jumei_2019_7.Jumei_utils

import org.apache.spark.sql.SparkSession

/**
  * @Description Extract last month's off-sale (is_onsell = false) Jumei products
  *              that are not yet present in the historical off-sale dataset,
  *              and append them to that dataset.
  * @Author liutaowei
  * @Date 2018/11/5 19:59
  */
/**
  * Finds products that went off-sale last month ("is_onsell = false" in the raw
  * Jumei source data) that are not already recorded in the historical off-sale
  * dataset, and writes the newly off-sale records into that dataset.
  *
  * Logic: left-join last month's off-sale rows against the accumulated history
  * on good_id and keep only rows with no match (a left anti-join expressed as
  * "add_stat is null").
  */
object excat_isonesell {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      // Fixed: app name previously said "taobao_isnosell" although this job
      // processes Jumei data — misleading in the Spark UI / history server.
      .appName("jumei_is_onsell_false")
      .getOrCreate()
    val sc = spark.sparkContext

    // SECURITY: credentials were hard-coded in source. They can now be supplied
    // via environment variables; the original literals remain only as
    // backward-compatible fallbacks. These keys are leaked by this file's
    // history — rotate them and delete the fallbacks as soon as possible.
    sc.hadoopConfiguration.set("fs.s3a.access.key",
      sys.env.getOrElse("OBS_ACCESS_KEY", "GAO7EO9FWKPJ8WFCQDME"))
    sc.hadoopConfiguration.set("fs.s3a.secret.key",
      sys.env.getOrElse("OBS_SECRET_KEY", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL"))
    // NOTE(review): s3a endpoints are often configured without a URL scheme —
    // the "https://" prefix is kept as-is here; confirm it is required by this
    // Hadoop/OBS combination before changing it.
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")
    sc.setLogLevel("WARN")

    /** ******************* IMPORTANT — job parameters (edit per run) ********* */
    val year = "2019"
    val month = "6_02"
    val obsFs = s"s3a://o2o-dataproces-group/liu_taowei/"
    val platform_Name = "jumei"
    // Path of last month's raw source data.
    val sourceData_url = "s3a://o2o-sourcedata/obs-source-2019/6/Jumei/jumei_2019_06_2/"
    // Output path for this month's newly off-sale products.
    val resultUrl = obsFs + s"month_data/plat_need/${platform_Name}/is_onsell_false/jumei_${year}_${month}"
    // Accumulated history: every monthly run writes under this prefix.
    val historyUrl = obsFs + s"month_data/plat_need/${platform_Name}/is_onsell_false/*"
    /** ******************* IMPORTANT ***************************************** */

    // Last month's raw data.
    val jumei_this = spark.read.json(sourceData_url)

    // Historical off-sale products, one row per good_id; add_stat = 1 simply
    // marks "already recorded" for the null-check after the left join.
    // (Fixed: this path was a third hard-coded copy of obsFs/platform_Name.)
    val jumei_last = spark.read.json(historyUrl)
      .selectExpr("good_id", "1 as add_stat")
      .dropDuplicates("good_id")

    // Off-sale rows from last month that are NOT yet in the history.
    jumei_this.where("is_onsell = false")
      .select("good_id", "is_onsell")
      .join(jumei_last, Seq("good_id"), "left")
      .where("add_stat is null")
      .distinct()
      .repartition(1) // collapse to a single output file
      // Fixed: the write path was a duplicated hard-coded string that happened
      // to equal the otherwise-unused resultUrl; use the variable so the two
      // definitions cannot drift apart.
      .write.json(resultUrl)

    // Fixed: the session was never stopped, leaving the local Spark context
    // (and its threads) alive until JVM exit.
    spark.stop()

    // A commented-out Spark SQL variant of the same anti-join was removed from
    // here; it relied on the deprecated registerTempTable API. If a SQL form is
    // ever needed again, use createOrReplaceTempView instead.
  }
}
