package com.o2o.cleaning.month.platform.ebusiness_plat.jumei_2019_7.Jumei_utils

import org.apache.spark.sql.SparkSession
import org.elasticsearch.spark._
/**
  * @Description Re-pulls Jumei platform data (2018-11) from Elasticsearch and
  *              filters out goods already captured in the "no longer on sale"
  *              snapshot stored on OBS, writing the remainder as local JSON.
  * @Author liutaowei
  * @Date 2018/12/4 16:45
  */
object reget_jumei_data {

  def main(args: Array[String]): Unit = {

    // Local Spark session wired to the Elasticsearch cluster that holds the
    // scraped platform data.
    // NOTE(review): ES credentials are hard-coded here — move them to a config
    // file or environment variables before sharing this code.
    val spark = SparkSession.builder()
      .appName("SparkTest")
      .master("local[*]")
      .config("es.nodes", "192.168.2.247")
      .config("es.port", "9200")
      .config("cluster.name", "O2OElastic")
      .config("es.net.http.auth.user", "elastic")
      .config("es.net.http.auth.pass", "changeme")
      .getOrCreate()

    val sc = spark.sparkContext
    // S3A access to Huawei OBS, where the "no longer on sale" snapshot lives.
    // NOTE(review): access/secret keys are hard-coded — externalize these too.
    sc.hadoopConfiguration.set("fs.s3a.access.key", "I6X0QOBRFQNDXGH6AMEU")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "uRrV8loOf6OszhzXz5GkkzBEQe5BY03vq5NEEWUw")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")

    val plat_name = "jumei"
    // ES "index/type" resource holding the November 2018 scrape for this platform.
    val coll = s"2018_${plat_name}_11/type_1"

    // Every document from the ES resource as raw JSON strings (keys dropped).
    val value = sc.esJsonRDD(coll).values

//    value.repartition(1).saveAsTextFile(s"D:\\sss\\test\\platData\\11\\${plat_name}_1")

    // Goods flagged as no longer on sale. Tag each row with add_st = 1 so the
    // left join below can detect membership, and de-dup on the join key.
    val no_sell = spark.read.json("s3a://o2o-dataproces-group/liu_taowei/month_data/plat_need/jumei/jumei_reget_falsesell")
      .selectExpr("*", "1 as add_st")
      .dropDuplicates("good_id")

    // Anti-join: keep only goods NOT present in the no-sell set (left join +
    // null filter on the tag column), then write a single JSON output file.
    spark.read.json(value)
      .join(no_sell, Seq("good_id"), "left")
      .where("add_st is  null")
      .repartition(1)
      .write.json(s"D:\\sss\\test\\platData\\11\\${plat_name}_2")
  }
}
