package a_o2odata_deal.utils

import com.alibaba.fastjson.JSON
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

/**
  * @ Author: o2o-rd-0008
  * @ Date:   2020/2/1 10:41
  * @ Description: Backfill anchor nicknames into the monthly
  *                `2019_all_anchor` Elasticsearch indices (months 2-12).
  */
/**
  * One-off backfill job: enriches every document in the monthly
  * `2019_all_anchor/all_anchor_2019_{2..12}` Elasticsearch indices with a
  * `nick` field, by left-joining each month's documents against an
  * anchorId→nick lookup table read from OBS (via the S3A connector), then
  * writing the enriched rows back to the same index keyed by `anchorId`
  * (an in-place upsert).
  */
object modify_all_anchor_nick {

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    // getSimpleName already returns a String — no interpolation needed.
    conf.setAppName(this.getClass.getSimpleName)
    conf.set("spark.debug.maxToStringFields", "500")
    conf.setMaster("local[*]")
    conf.set("es.nodes", "192.168.1.157")
    conf.set("es.port", "9200")
    conf.set("cluster.name", "O2OElastic")
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    conf.set("spark.sql.caseSensitive", "true")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)
    sc.setLogLevel("WARN")
    // SECURITY NOTE(review): object-storage credentials are hard-coded in
    // source. Anyone with repo access can read and use these keys — move
    // them to environment variables or a Hadoop credentials provider and
    // rotate the exposed pair.
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")

    import org.elasticsearch.spark._
    // Load the anchor lookup table (anchorId -> nick), filtering out rows
    // with the '-1' unknown-nick sentinel and deduplicating on anchorId.
    val anchor_id = sqlContext.read.json("s3a://o2o-dataproces-group/panzonghao/tmall/2020/1/tmall_anchor_id_path/")
      .where("nick!='-1'").selectExpr("anchorId", "nick").dropDuplicates("anchorId")
    anchor_id.createOrReplaceTempView("anchor_id")

    for (months <- 2 to 12) {
      // Pull the month's documents out of ES as raw JSON strings and
      // re-parse them into a DataFrame with an inferred schema.
      val value: RDD[String] = sc.esJsonRDD(s"2019_all_anchor/all_anchor_2019_${months}").values
      val source_data = sqlContext.read.json(value)
      source_data.createOrReplaceTempView("source_data")

      // Left join so anchors missing from the lookup keep a '-1' sentinel.
      // NOTE(review): if source_data already carries a `nick` column,
      // `t1.*` plus the computed `nick` produces two columns with the same
      // name — confirm the source schema before relying on this output.
      val result = sqlContext.sql(
        """
           |select
           |t1.*,
           |case when t2.anchorId is null then '-1' else t2.nick end as nick
           |from source_data t1
           |left join
           |anchor_id t2
           |on t1.anchorId=t2.anchorId
         """.stripMargin)

      // Write back to the same index; es.mapping.id=anchorId makes this an
      // upsert of existing documents rather than appending new ones.
      result.toJSON.rdd.map(line => {
        JSON.parseObject(line)
      }).saveToEs(s"2019_all_anchor/all_anchor_2019_${months}", Map("es.mapping.id" -> "anchorId"))

    }

  }

}
