package a_o2odata_deal.utils.zhibo_qy

import a_aa_amainpackage.a_o2odata_deal.config.config.{months, years}
import org.apache.spark.sql.SQLContext

/**
  * @ Author: o2o-rd-0008
  * @ Date:   2020/2/11 11:21
  * @ Description: Collects deduplicated live-stream ("zhibo") user records from the
  *                Taobao and Tmall monthly outputs and backs them up as JSON on S3.
  */
object update_all_userid {

  /**
    * Merges the monthly live-stream ("zhibo") user records of the Taobao and Tmall
    * platforms and writes the combined set as a JSON backup on S3.
    *
    * For each platform it reads the ORC output at
    * `s3a://o2o-dataproces-group/panzonghao/<platform>/<years>/<months>/zhibo_finally`,
    * keeps the columns userId / regional_ID / province / city / district / timeStamp,
    * tags each row with a `platForm` literal and drops duplicate rows per platform.
    *
    * @param sqlContext    active Spark SQLContext used for reading and writing
    * @param numPartitions number of output partitions for the JSON backup (default 4,
    *                      matching the original hard-coded value)
    */
  def update_all_userid(sqlContext: SQLContext, numPartitions: Int = 4): Unit = {
    import org.apache.spark.sql.functions._

    // Both platforms share an identical pipeline; only the platform name differs,
    // appearing in the input path and as the `platForm` column value.
    def loadPlatform(platform: String) =
      sqlContext.read
        .orc(s"s3a://o2o-dataproces-group/panzonghao/${platform}/${years}/${months}/zhibo_finally")
        .selectExpr("userId", "regional_ID", "province", "city", "district", "timeStamp")
        .withColumn("platForm", lit(platform))
        .dropDuplicates()

    // NOTE(review): `unionAll` is kept for compatibility with the Spark version this
    // targets (SQLContext-era API); on Spark 2.x+ `union` is the non-deprecated
    // equivalent. Duplicates across platforms are intentionally preserved — rows
    // carry different `platForm` values anyway.
    loadPlatform("taobao")
      .unionAll(loadPlatform("tmall"))
      .repartition(numPartitions)
      .write
      .json(s"s3a://o2o-dataproces-group/panzonghao/zhibo_zf/backups/all_userid/${years}/${months}")
  }

}
