package a_o2odata_deal.utils.zhibo_qy

import a_aa_amainpackage.a_o2odata_deal.config.config._
import com.alibaba.fastjson.{JSON, JSONObject}
import org.apache.spark.sql.SQLContext

/**
  * @ Author: o2o-rd-0008
  * @ Date:   2020/2/11 16:13
  * @ Description: Builds the per-anchor ("zhubo") live-stream summary tables
  *                for the Tmall and Taobao platforms.
  */
object handle_zhibo_live_zhubo {

  /**
    * Entry point: builds the per-anchor ("zhubo") live-stream summary for both
    * the Tmall and the Taobao platform. Each platform run reads the crawled
    * live metadata, aggregates per-anchor statistics (live count, viewers,
    * fans, sales) and writes an ORC snapshot under
    * s3a://.../panzonghao/&lt;platform&gt;/&lt;years&gt;/&lt;months&gt;/zhibo_zhubo.
    *
    * @param sqlContext active Spark SQLContext used for all reads and queries
    */
  def handle_zhibo_live_zhubo(sqlContext:SQLContext):Unit={
    handle_zhibo_tmall_live_zhubo(sqlContext)
    handle_zhibo_taobao_live_zhubo(sqlContext)
  }

  /** Builds the anchor summary for the Tmall platform. */
  def handle_zhibo_tmall_live_zhubo(sqlContext:SQLContext):Unit={
    handle_platform_live_zhubo(sqlContext, "tmall")
  }

  /** Builds the anchor summary for the Taobao platform. */
  def handle_zhibo_taobao_live_zhubo(sqlContext:SQLContext):Unit={
    handle_platform_live_zhubo(sqlContext, "taobao")
  }

  /**
    * Shared per-platform pipeline. `platform` is "tmall" or "taobao" and only
    * affects the zhibo_finally input path and the zhibo_zhubo output path —
    * exactly the two places where the original duplicated methods differed.
    *
    * NOTE(review): both platform runs read tmall_live_detail_path,
    * tmall_live_id_path and tmall_anchor_id_path. For the taobao run this
    * looks like a copy-paste of the tmall inputs — confirm whether
    * taobao-specific config paths exist and should be used instead. Behavior
    * is intentionally preserved here.
    *
    * @param sqlContext active Spark SQLContext
    * @param platform   "tmall" or "taobao"; selects input/output S3 prefixes
    */
  private def handle_platform_live_zhubo(sqlContext:SQLContext, platform:String):Unit={
    import org.apache.spark.sql.functions._

    // Historical set of every shop userId ever crawled; an anchor whose id
    // appears here is assumed to be broadcasting for their own shop
    // ("self live").
    val all_data = sqlContext.read.json("s3a://o2o-dataproces-group/panzonghao/zhibo_zf/backups/all_userid/*/*")
      .dropDuplicates("userId")
    all_data.createOrReplaceTempView("all_data")

    // Live-stream detail (live -> goods mapping) and live-stream id/metadata.
    val data_detail = sqlContext.read.json(s"${a_aa_amainpackage.a_o2odata_deal.config.config.tmall_live_detail_path}")
      .dropDuplicates("good_id","liveId")
    val data_id = sqlContext.read.json(s"${a_aa_amainpackage.a_o2odata_deal.config.config.tmall_live_id_path}")

    // Per-good sales figures for the current month; this input is
    // platform-specific.
    val zhibo_data = sqlContext.read.orc(s"s3a://o2o-dataproces-group/panzonghao/${platform}/${years}/${months}/zhibo_finally")
      .selectExpr("userId","good_id","sellCount","salesAmount")
    zhibo_data.createOrReplaceTempView("zhibo_data")

    data_id.drop("good_id").createOrReplaceTempView("data_id")
    data_detail.createOrReplaceTempView("data_detail")

    // Keep only lives whose promoted goods can be matched to this platform's
    // sales data.
    sqlContext.sql(
      s"""
         |select
         |t3.*
         |from
         |(select distinct
         |t1.*,
         |t2.good_id
         |from data_id t1
         |inner join
         |data_detail t2
         |on t1.liveId=t2.liveId) t3
         |inner join
         |zhibo_data t4
         |on t3.good_id=t4.good_id
       """.stripMargin).drop("good_id").dropDuplicates().createOrReplaceTempView("data_id1")

    // Per-anchor totals: number of lives, cumulative viewer count and fans.
    val mid = sqlContext.sql(
      s"""
         |select distinct
         |*,
         |sum(viewerCount) over(partition by anchorId) as viewerCounts,
         |sum(anchorFans) over(partition by anchorId) as anchorFanss,
         |count(liveId) over(partition by anchorId) as liveCounts
         |from data_id1
       """.stripMargin).drop("viewerCount","liveCount","anchorFans")
      .withColumnRenamed("viewerCounts","viewerCount")
      .withColumnRenamed("liveCounts","liveCount")
      .withColumnRenamed("anchorFanss","anchorFans")
    mid.createOrReplaceTempView("data_id2")

    // Re-attach goods. One live promotes many goods (1-to-many), so rows
    // multiply here; dedupe on (anchorId, good_id).
    sqlContext.sql(
      s"""
         |select
         |t1.*,
         |t2.good_id
         |from data_id2 t1
         |inner join
         |data_detail t2
         |on t1.liveId=t2.liveId
       """.stripMargin).dropDuplicates("anchorId","good_id")
      .createOrReplaceTempView("source_data")

    // Tag is_selfLive: true when the anchor id matches a known shop userId.
    val result_mid = sqlContext.sql(
      s"""
         |select
         |t1.*,
         |case
         |when t2.userId is null then false
         |else true
         |end as is_selfLive
         |from source_data t1
         |left join
         |--zhibo_data t2
         |all_data t2
         |on t1.anchorId=t2.userId
       """.stripMargin).dropDuplicates("anchorId","good_id")
      .withColumn("timeStamp",lit(timeStamp))
    result_mid.createOrReplaceTempView("result_mid")

    // Roll sales up per anchor, then collapse to one row per anchor.
    val result = sqlContext.sql(
      s"""
         |select
         |*,
         |sum(sellCount) over(partition by anchorId) as sells,
         |sum(salesAmount) over(partition by anchorId) as sales
         |from
         |(select
         |t1.*,
         |t2.sellCount,
         |t2.salesAmount
         |from result_mid t1
         |inner join
         |zhibo_data t2
         |on t1.good_id=t2.good_id)
       """.stripMargin).drop("good_id","sellCount","salesAmount")
      .withColumnRenamed("sells","sellCount")
      .withColumnRenamed("sales","salesAmount")
      .dropDuplicates("anchorId")
    result.createOrReplaceTempView("result")

    // Normalize the crawled anchor nickname feed: records missing "nick" get
    // a "-1" sentinel so the schema is uniform, then they are filtered out.
    val normalized = sqlContext.read.json(s"${tmall_anchor_id_path}").toJSON.rdd.map(line =>{
      val obj: JSONObject = JSON.parseObject(line)
      obj.put("nick", obj.getOrDefault("nick", "-1").toString)
      obj.toString
    })
    val anchor_id = sqlContext.read.json(normalized).where("nick!='-1'")
      .selectExpr("anchorId","nick").dropDuplicates("anchorId")
    anchor_id.createOrReplaceTempView("anchor_id")

    // Prefer the nickname from the anchor feed; fall back to the nickname
    // already carried on the row.
    val result1 = sqlContext.sql(
      s"""
         |select
         |t1.*,
         |case
         |when t2.anchorId is null then t1.nick
         |else t2.nick
         |end as nicks
         |from result t1
         |left join
         |anchor_id t2
         |on t1.anchorId=t2.anchorId
       """.stripMargin).drop("nick").withColumnRenamed("nicks","nick")
    result1.createOrReplaceTempView("tb_data")

    // Manual override: anchor 69226163 is known to NOT be a self-live
    // account despite matching the shop-userId set.
    val result2 = sqlContext.sql(
      s"""
         |select
         |*,
         |case when anchorId='69226163' then false else is_selfLive end as is_selfLives
         |from tb_data
         """.stripMargin).drop("is_selfLive")
      .withColumnRenamed("is_selfLives","is_selfLive")

    result2.repartition(4).write.orc(s"s3a://o2o-dataproces-group/panzonghao/${platform}/${years}/${months}/zhibo_zhubo")
  }

}
