package a_o2odata_deal.utils

import a_aa_amainpackage.a_o2odata_deal.config.config.tmall_anchor_id_path
import com.alibaba.fastjson.{JSON, JSONObject}
import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * @ Author: o2o-rd-0008
  * @ Date:   2019/12/18 17:54
  * @ Param:  ${PARAM}
  * @ Description: 
  */
object handle_zhibo_v3 {

  /**
    * Job entry point.
    *
    * Builds a local-mode SparkContext wired to a fixed Elasticsearch node
    * (192.168.1.157:9200, cluster O2OElastic) and to Huawei OBS via the
    * s3a connector, then delegates all processing to [[handle_zhibo]].
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setAppName(s"${this.getClass.getSimpleName}")
    conf.set("spark.debug.maxToStringFields", "500")
    conf.setMaster("local[*]")
    conf.set("es.nodes", "192.168.1.157")
    conf.set("es.port", "9200")
    conf.set("cluster.name", "O2OElastic")
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    conf.set("spark.sql.caseSensitive", "true")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)
    sc.setLogLevel("WARN")
    // SECURITY(review): object-store credentials are hard-coded in source.
    // They should be rotated and moved to configuration/environment before
    // this file is shared or committed further.
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")

    handle_zhibo(sqlContext)
  }

  /**
    * Aggregates Tmall live-stream ("zhibo") records per anchor and writes the
    * result to Elasticsearch.
    *
    * Pipeline:
    *   1. Read live-session JSON from `tmall_live_id_path`.
    *   2. For each anchor keep `anchorFans` from the most recent session
    *      (row_number window over `liveTime` desc) -> temp view `data1`.
    *   3. Aggregate per anchor: distinct session count, summed viewers,
    *      summed commodity count -> temp view `data2`; inner-join with `data1`.
    *   4. Tag each anchor's platform (`from_where` = 'taobao' > 'tmall' > '')
    *      by left-joining the Tmall and Taobao user-id dumps on OBS.
    *   5. Attach the anchor nickname; records with a missing `nick` are
    *      normalized to "-1" via fastjson and filtered out before the join,
    *      so un-matched anchors end up with nick = '-1'.
    *   6. Stamp a fixed `timeStamp` (1580313600) and save to ES index
    *      `2020_all_anchor/all_anchor_2020_1`, document id = anchorId.
    *
    * Large commented-out sections below are retained legacy/backfill code.
    *
    * @param sqlContext active SQLContext used for all reads and SQL
    * @return the final per-anchor DataFrame that was written to Elasticsearch
    */
  def handle_zhibo(sqlContext:SQLContext):DataFrame={
    /*val data_detail = sqlContext.read.json("s3a://o2o-dataproces-group/panzonghao/tmall/11/tmall_live_detail_path_v1/")
      .dropDuplicates("good_id","liveId")*/
    val data_id = sqlContext.read.json(s"${a_aa_amainpackage.a_o2odata_deal.config.config.tmall_live_id_path}")

      //.where("cast(liveTime as long)<1575129600 and cast(liveTime as long)>=1572537600")

    data_id.createOrReplaceTempView("data_id")
    //data_detail.createOrReplaceTempView("data_detail")

    /*sqlContext.sql(
      s"""
         |select
         |*
         |from data_id
         |where anchorId='69226163' and (cast(liveTime as long)>1575129600 or cast(liveTime as long)<1572537600)
       """.stripMargin).show(false)*/

    // Per-anchor session statistics -- production code.
    // data1: latest known fan count per anchor (most recent liveTime wins).
    sqlContext.sql(
      s"""
         |select
         |anchorId,
         |anchorFans
         |from
         |(select
         |anchorId,
         |anchorFans,
         |row_number() over(partition by anchorId order by cast(liveTime as bigint) desc) as rank
         |from data_id)
         |where rank=1
       """.stripMargin).createOrReplaceTempView("data1")

    // data2: per-anchor totals across all sessions.
    sqlContext.sql(
      s"""
         |select
         |anchorId,
         |count(distinct liveId) as liveCount,
         |sum(viewerCount) as viewerCount,
         |--sum(anchorFans) as anchorFans,
         |sum(commodityCount) as liveGoodCount
         |from data_id
         |group by anchorId
       """.stripMargin).createOrReplaceTempView("data2")

    val result = sqlContext.sql(
      s"""
         |select
         |t2.*,
         |t1.anchorFans
         |from data1 t1
         |inner join
         |data2 t2
         |on t1.anchorId=t2.anchorId
       """.stripMargin)

    // Platform membership lookups: distinct user ids per platform.
    sqlContext.read.json("s3a://o2o-dataproces-group/panzonghao/tmall/all_userid/*/*")
      .selectExpr("userId").dropDuplicates().createOrReplaceTempView("tm_userid")
    sqlContext.read.json("s3a://o2o-dataproces-group/panzonghao/taobao/all_userid/*/*")
      .selectExpr("userId").dropDuplicates().createOrReplaceTempView("tb_userid")
    result.createOrReplaceTempView("all_data")

    // First tag pass: mark anchors found in the Tmall user set.
    sqlContext.sql(
      s"""
         |select
         |t1.*,
         |case when t2.userId is null then '' else 'tmall' end as from_where
         |from all_data t1
         |left join
         |tm_userid t2
         |on t1.anchorId=t2.userId
         """.stripMargin).createOrReplaceTempView("mid")

    // Second tag pass: a Taobao match overrides the Tmall tag.
    val result1 = sqlContext.sql(
      s"""
         |select
         |t1.*,
         |case when t2.userId is null then t1.from_where else 'taobao' end as from_wheres
         |from mid t1
         |left join
         |tb_userid t2
         |on t1.anchorId=t2.userId
         """.stripMargin).drop("from_where")
      .withColumnRenamed("from_wheres","from_where")

    // Normalize missing `nick` to "-1" so the schema is stable and the
    // where("nick!='-1'") filter below can drop nameless records.
    val v = sqlContext.read.json(s"${tmall_anchor_id_path}").toJSON.rdd.map(line =>{
      val nObject: JSONObject = JSON.parseObject(line)
      val nick: String = nObject.getOrDefault("nick", "-1").toString()
      nObject.put("nick",nick)
      nObject.toString
    })


      val anchor_id = sqlContext.read.json(v).where("nick!='-1'").selectExpr("anchorId","nick").dropDuplicates("anchorId")
    anchor_id.createOrReplaceTempView("anchor_id")

    anchor_id.printSchema()

    result1.createOrReplaceTempView("source_data")

    // Attach nickname; anchors without one get the sentinel '-1'.
    val result2 = sqlContext.sql(
      s"""
         |select
         |t1.*,
         |case when t2.anchorId is null then '-1' else t2.nick end as nick
         |from source_data t1
         |left join
         |anchor_id t2
         |on t1.anchorId=t2.anchorId
         """.stripMargin)
    // Temporary/backfill code (legacy, kept commented out for reference).
   /* val bu_data = sqlContext.read.json(s"s3a://o2o-dataproces-group/panzonghao/tmall/11/tmall_live_id_1911_bu")
    bu_data.createOrReplaceTempView("bu_data")

    val mid = sqlContext.sql(
      s"""
         |select
         |t1.*,
         |t1.fans as anchorFans,
         |case
         |when t1.commodityCount='-1' then t2.commodityCount
         |else t1.commodityCount
         |end as commodityCounts
         |from data_id t1
         |left join
         |bu_data t2
         |on t1.liveId=t2.liveId
       """.stripMargin).drop("commodityCount","fans").withColumnRenamed("commodityCounts","commodityCount")

    mid.createOrReplaceTempView("mid")
    mid.where("cast(anchorFans as bigint)>0").createOrReplaceTempView("mid1")

    sqlContext.sql(
      s"""
         |select
         |anchorId,
         |anchorFans
         |from
         |(select
         |anchorId,
         |anchorFans,
         |row_number() over(partition by anchorId order by cast(liveTime as bigint) desc) as rank
         |from mid1
         |) where rank=1
         """.stripMargin).createOrReplaceTempView("mid1")

    sqlContext.sql(
      s"""
         |select
         |anchorId,
         |count(distinct liveId) as liveCount,
         |sum(viewerCount) as viewerCount,
         |--sum(anchorFans) as anchorFans,
         |sum(commodityCount) as liveGoodCount
         |from mid
         |group by anchorId
       """.stripMargin).createOrReplaceTempView("mid2")

    val result = sqlContext.sql(
      s"""
         |select
         |t1.*,
         |case
         |when t2.anchorId is null then 0
         |else t2.anchorFans
         |end as anchorFans
         |from mid2 t1
         |left join
         |mid1 t2
         |on t1.anchorId=t2.anchorId
         """.stripMargin)*/


    //result.where("anchorId='69226163'").show(false)
    //println("*********"+result.count())

   /* // Count of distinct goods each anchor streamed (legacy, commented out).
    val source_data = sqlContext.sql(
      s"""
         |select
         |t1.*,
         |t1.fans as anchorFans,
         |t2.livePriceText,
         |t2.good_id as good_ids
         |from data_id t1
         |inner join
         |data_detail t2
         |on t1.liveId=t2.liveId
       """.stripMargin).drop("good_id","fans").withColumnRenamed("good_ids","good_id").where("good_id!='-1'")
      .dropDuplicates("anchorId","good_id")

    source_data.createOrReplaceTempView("source_data")

    sqlContext.sql(
      s"""
         |select
         |anchorId,
         |count(distinct good_id) as liveGoodCount
         |from source_data
         |group by anchorId
       """.stripMargin).createOrReplaceTempView("mid1")

    val result = sqlContext.sql(
      s"""
         |select
         |t1.*,
         |t2.liveCount,
         |t2.viewerCount,
         |t2.anchorFans
         |from mid1 t1
         |inner join
         |mid t2
         |on t1.anchorId=t2.anchorId
       """.stripMargin)
*/

    import org.apache.spark.sql.functions._
    import org.elasticsearch.spark._
    // Fixed batch timestamp (2020-01-30 00:00:00 UTC+8); documents are
    // upserted into ES keyed by anchorId.
    result2.withColumn("timeStamp",lit("1580313600")).toJSON.rdd.map(line =>{
      JSON.parseObject(line)
    }).saveToEs(s"2020_all_anchor/all_anchor_2020_1", Map("es.mapping.id" -> "anchorId"))
    // Fix: previously returned `null` despite the declared DataFrame return
    // type; return the DataFrame that was actually written instead.
    result2
  }

}
