package a_o2odata_deal.utils

import java.text.SimpleDateFormat
import java.util.Date

import a_o2odata_deal.utils.handle_zhibo_zhubo_loop.tranTimeToLong
import com.alibaba.fastjson.JSON
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

/**
  * @ Author: o2o-rd-0008
  * @ Date:   2019/12/19 15:47
  * @ Param:  ${PARAM}
  * @ Description: Monthly per-anchor Tmall live-stream aggregation, written to Elasticsearch.
  */
object handle_zhibo_v3_loop {

  /**
    * Entry point. For each month of 2019 (Feb through Oct), aggregates Tmall
    * live-stream records per anchor (live count, viewer count, commodity count,
    * plus the fan count taken from the anchor's latest live of the month) and
    * writes one Elasticsearch index per month.
    *
    * Side effects: reads ORC data from Huawei OBS via the s3a connector and
    * writes documents to the ES cluster configured below.
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setAppName(s"${this.getClass.getSimpleName}")
    conf.set("spark.debug.maxToStringFields", "500")
    // local[*] master: this job is intended to run standalone, not via spark-submit cluster mode.
    conf.setMaster("local[*]")
    conf.set("es.nodes", "192.168.1.157")
    conf.set("es.port", "9200")
    conf.set("cluster.name", "O2OElastic")
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    conf.set("spark.sql.caseSensitive", "true")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)
    sc.setLogLevel("WARN")
    // NOTE(review): hardcoded object-storage credentials committed to source;
    // these should be moved to configuration/environment and rotated.
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")

    // UDF used in the SQL below to bucket epoch-second liveTime values into "yyyy-M" labels.
    sqlContext.udf.register("tranTimeToString",tranTimeToString _)

    import org.elasticsearch.spark._
    val data_id = sqlContext.read.orc(s"s3a://o2o-dataproces-group/panzonghao/tmall/11/tmall_live_id_191101_before")
    data_id.createOrReplaceTempView("data_id")
    // NOTE(review): "data_id_tmp" (anchors with non-zero fans) is registered but
    // never referenced by any query below — likely leftover; the queries all read
    // from the unfiltered "data_id" view.
    data_id.where("cast(anchorFans as bigint)!=0").createOrReplaceTempView("data_id_tmp")

    for (months <- 2 to 10){
      // Take, per anchor, the fan count from the last live record of the month
      // (original comment: pick the record whose last-day fan count is not 0 —
      // note the query itself applies no non-zero filter; see data_id_tmp above).
      sqlContext.sql(
        s"""
           |select
           |anchorId,
           |anchorFans
           |from
           |(select
           |anchorId,
           |anchorFans,
           |row_number() over(partition by anchorId order by cast(liveTime as bigint) desc) as rank
           |from (select * from data_id where tranTimeToString(liveTime)='2019-${months}')
           |) where rank=1
         """.stripMargin).createOrReplaceTempView("mid1")


      // Per-anchor monthly aggregates (the anchorFans sum is intentionally
      // commented out — fans come from mid1's latest-record snapshot instead).
      sqlContext.sql(
        s"""
           |select
           |anchorId,
           |count(distinct liveId) as liveCount,
           |sum(viewerCount) as viewerCount,
           |--sum(anchorFans) as anchorFans,
           |sum(commodityCount) as liveGoodCount
           |from (select * from data_id where tranTimeToString(liveTime)='2019-${months}')
           |group by anchorId
       """.stripMargin).createOrReplaceTempView("mid2")

      // Join the aggregates with the latest fan snapshot; anchors missing from
      // mid1 get anchorFans = 0.
      val result = sqlContext.sql(
        s"""
           |select
           |t1.*,
           |case
           |when t2.anchorId is null then 0
           |else t2.anchorFans
           |end as anchorFans
           |from mid2 t1
           |left join
           |mid1 t2
           |on t1.anchorId=t2.anchorId
         """.stripMargin)


      import org.apache.spark.sql.functions._
      // Stamp each document with the month's "end of month" midnight: Feb uses
      // day 28, every other month uses day 30.
      // NOTE(review): months with 31 days are stamped with day 30, so the
      // timestamp is not the true last day for Mar/May/Jul/Aug/Oct — confirm
      // whether that is intentional.
      if (months==2){
        result.withColumn("timeStamp",lit(tranTimeToLong(s"2019-${months}-28 00:00:00"))).toJSON.rdd.map(line =>{
          JSON.parseObject(line)
        }).saveToEs(s"2019_all_anchor/all_anchor_2019_${months}", Map("es.mapping.id" -> "anchorId"))
      }else{
        result.withColumn("timeStamp",lit(tranTimeToLong(s"2019-${months}-30 00:00:00"))).toJSON.rdd.map(line =>{
          JSON.parseObject(line)
        }).saveToEs(s"2019_all_anchor/all_anchor_2019_${months}", Map("es.mapping.id" -> "anchorId"))
      }

    }

  }

  /**
    * Converts an epoch timestamp in seconds (given as a decimal string) into a
    * "yyyy-M" month label, e.g. "1560000000" -> "2019-6". The month is NOT
    * zero-padded, which must match the '2019-${months}' literals built in the
    * SQL in main (months interpolates as "2", "3", ... "10").
    *
    * Note: formatting uses the JVM default time zone, so values close to a
    * month boundary may bucket differently depending on where the job runs.
    *
    * @param timestamp epoch seconds as a decimal string; must parse as a Long
    *                  (leading/trailing whitespace is tolerated)
    * @return the "year-month" label for that instant
    */
  def tranTimeToString(timestamp:String) :String={
    // Convert seconds -> milliseconds numerically instead of appending "000"
    // to the string, which was opaque and broke on surrounding whitespace.
    val millis = timestamp.trim.toLong * 1000L
    // SimpleDateFormat is not thread-safe; building a fresh instance per call
    // keeps this UDF safe across concurrent Spark task threads.
    val fm = new SimpleDateFormat("yyyy-M")
    fm.format(new Date(millis))
  }

}
