package a_o2odata_deal.utils.zhibo_zf

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

/**
  * @author o2o-rd-0008
  * @since 2020/2/6 09:43
  * @note Reads the January 2020 Taobao/Tmall "zhibo_finally" ORC datasets from S3A,
  *       unions them with a platform tag, and writes the distinct user records as JSON.
  */
object update_all_userid {

  /**
    * Entry point. Reads the Taobao and Tmall live-stream ("zhibo") ORC datasets for
    * 2020-01 from S3A, tags each row with its source platform, deduplicates, unions
    * the two datasets, and writes the result as JSON to the backup path.
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setAppName(s"${this.getClass.getSimpleName}")
    conf.set("spark.debug.maxToStringFields", "500")
    //conf.setMaster("local[*]")
    conf.set("es.nodes", "192.168.1.157")
    conf.set("es.port", "9200")
    conf.set("cluster.name", "O2OElastic")
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    //conf.set("spark.sql.caseSensitive", "true")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)
    sc.setLogLevel("WARN")
    // SECURITY NOTE(review): credentials are hard-coded in source control. These keys
    // should be rotated and supplied via spark-submit conf / environment / credential
    // provider (e.g. fs.s3a.aws.credentials.provider) instead of being committed here.
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    //sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")

    try {
      import org.apache.spark.sql.functions._

      // Columns shared by both source datasets; dedup after tagging with the platform.
      val userColumns = Seq("userId", "regional_ID", "province", "city", "district", "timeStamp")

      val tb_month12 = sqlContext.read.orc("s3a://o2o-dataproces-group/panzonghao/taobao/2020/1/zhibo_finally")
        .selectExpr(userColumns: _*).withColumn("platForm", lit("taobao")).dropDuplicates()

      val tm_month12 = sqlContext.read.orc("s3a://o2o-dataproces-group/panzonghao/tmall/2020/1/zhibo_finally")
        .selectExpr(userColumns: _*).withColumn("platForm", lit("tmall")).dropDuplicates()

      // NOTE(review): unionAll is deprecated in Spark 2.x in favor of union (identical
      // semantics for DataFrames); kept as-is since the Spark version is not visible here.
      // repartition(4) keeps the output to a small, fixed number of JSON part files.
      tb_month12.unionAll(tm_month12).repartition(4)
        .write.json("s3a://o2o-dataproces-group/panzonghao/zhibo_zf/backups/all_userid/202001")
    } finally {
      // Ensure the SparkContext is released even if the read/union/write fails,
      // so the application exits cleanly and cluster resources are freed.
      sc.stop()
    }
  }

}
