package a_o2odata_deal.utils

import java.text.SimpleDateFormat

import org.apache.spark.sql.{DataFrame, SQLContext}

/**
  * @author o2o-rd-0008
  * @date   2019/12/18 10:57
  * @description Monthly batch jobs that join Tmall/Taobao live-stream data with
  *              per-good sales figures and write per-anchor / per-live-good
  *              summaries back to S3 as ORC.
  */
object handle_zhibo_zhubo_loop {

  def handle_zhubo_good(sqlContext:SQLContext,months:String):DataFrame={
    //关联直播
    val data_detail = sqlContext.read.orc(s"s3a://o2o-dataproces-group/panzonghao/tmall/${months}/tmall_live_detail_191101_before")
      .dropDuplicates("good_id","liveId")
    val data_id = sqlContext.read.orc(s"s3a://o2o-dataproces-group/panzonghao/tmall/11/tmall_live_id_191101_before")

    val zhibo_data = sqlContext.read.json(s"s3a://o2o-dataproces-group/panzonghao/zhibo/backups/tmall/${months}")
      .selectExpr("userId","good_id","sellCount","salesAmount")

    val all_data = sqlContext.read.json("s3a://o2o-dataproces-group/panzonghao/tmall/all_userid")
    all_data.createOrReplaceTempView("all_data")

    zhibo_data.createOrReplaceTempView("zhibo_data")

    data_id.drop("good_id").createOrReplaceTempView("data_id")
    data_detail.createOrReplaceTempView("data_detail")

    //计算一个主播的直播次数及累计观看人数
    sqlContext.sql(
      s"""
         |select
         |t3.*
         |from
         |(select distinct
         |t1.*,
         |t2.good_id
         |from data_id t1
         |inner join
         |data_detail t2
         |on t1.liveId=t2.liveId) t3
         |inner join
         |zhibo_data t4
         |on t3.good_id=t4.good_id
       """.stripMargin).drop("good_id").dropDuplicates().createOrReplaceTempView("data_id1")

    val mid = sqlContext.sql(
      s"""
         |select distinct
         |*,
         |sum(viewerCount) over(partition by anchorId) as viewerCounts,
         |sum(anchorFans) over(partition by anchorId) as anchorFanss,
         |count(liveId) over(partition by anchorId) as liveCounts
         |from data_id1
       """.stripMargin).drop("viewerCount","liveCount","anchorFans")
      .withColumnRenamed("viewerCounts","viewerCount")
      .withColumnRenamed("liveCounts","liveCount")
      .withColumnRenamed("anchorFanss","anchorFans")

    mid.createOrReplaceTempView("data_id2")

    //println("采集到总的直播数："+data_id.count()+"*****************")
    //println("可以关联到天猫商品的直播数："+mid.count()+"*****************")

    //首先统计出单品在多少个直播间出现过,1对多的关系，关联出来数据会增多
    val source_data = sqlContext.sql(
      s"""
         |select
         |t1.*,
         |--t1.fans as anchorFans,
         |t2.good_id
         |from data_id2 t1
         |inner join
         |data_detail t2
         |on t1.liveId=t2.liveId
       """.stripMargin).dropDuplicates("anchorId","good_id")
      .createOrReplaceTempView("source_data")

    import org.apache.spark.sql.functions._
    //打上是否是自主直播标签
    val result_mid = sqlContext.sql(
      s"""
         |select
         |t1.*,
         |case
         |when t2.userId is null then false
         |else true
         |end as is_selfLive
         |from source_data t1
         |left join
         |all_data t2
         |on t1.anchorId=t2.userId
       """.stripMargin).dropDuplicates("anchorId","good_id")
      .withColumn("timeStamp",lit(tranTimeToLong(s"2019-${months}-30 00:00:00")))

    result_mid.createOrReplaceTempView("result_mid")

    val result = sqlContext.sql(
      s"""
         |select
         |*,
         |sum(sellCount) over(partition by anchorId) as sells,
         |sum(salesAmount) over(partition by anchorId) as sales
         |from
         |(select
         |t1.*,
         |t2.sellCount,
         |t2.salesAmount
         |from result_mid t1
         |inner join
         |zhibo_data t2
         |on t1.good_id=t2.good_id)
       """.stripMargin).drop("good_id","sellCount","salesAmount")
      .withColumnRenamed("sells","sellCount")
      .withColumnRenamed("sales","salesAmount")
      .dropDuplicates("anchorId")


    result.createOrReplaceTempView("result")

    sqlContext.sql(
      s"""
         |select distinct
         |anchorId,
         |nick
         |from
         |(select
         |anchorId,
         |nick,
         |row_number() over(partition by anchorId order by nick desc) as rank
         |from data_id)
         |where rank=1
       """.stripMargin).createOrReplaceTempView("mid_v1")

    val result1 = sqlContext.sql(
      s"""
         |select
         |t1.*,
         |case
         |when t2.anchorId is null then t1.nick
         |else t2.nick
         |end as nicks
         |from result t1
         |left join
         |mid_v1 t2
         |on t1.anchorId=t2.anchorId
       """.stripMargin).drop("nick").withColumnRenamed("nicks","nick")



    result1.repartition(4).write.orc(s"s3a://o2o-dataproces-group/panzonghao/tmall/${months}/zhibo_zhubo_test")
    //result1.repartition(4).write.orc(s"s3a://o2o-dataproces-group/panzonghao/taobao/${months}/zhibo_zhubo_test")
    null
  }


  ///////////////////////////////////////////

  def handle_zhubo(sqlContext:SQLContext,months:String):DataFrame={
    //关联直播
    val data_detail = sqlContext.read.orc(s"s3a://o2o-dataproces-group/panzonghao/tmall/${months}/tmall_live_detail_191101_before")
      .dropDuplicates("good_id","liveId")
    val data_id = sqlContext.read.orc(s"s3a://o2o-dataproces-group/panzonghao/tmall/11/tmall_live_id_191101_before")

    /* val zhibo_data = sqlContext.read.json("s3a://o2o-dataproces-group/panzonghao/zhibo/backups/tmall/11")
       .drop("viewcount","roomTypeName","roomType","nick","liveTitle","liveTime","liveId","is_selfLive",
         "is_showLive","anchorId","commodityCount","liveCount","anchorFans","live_priceText","liveAvgPriceText")*/

    val zhibo_data = sqlContext.read.json(s"s3a://o2o-dataproces-group/panzonghao/zhibo/backups/taobao/${months}")

    zhibo_data.createOrReplaceTempView("zhibo_data")

    data_id.createOrReplaceTempView("data_id")
    data_detail.createOrReplaceTempView("data_detail")

    //首先统计出单品在多少个直播间出现过,1对多的关系，关联出来数据会增多
    val source_data = sqlContext.sql(
      s"""
         |select
         |t1.*,
         |--t1.fans as anchorFans,
         |t2.livePriceText,
         |t2.good_id as good_ids
         |from data_id t1
         |inner join
         |data_detail t2
         |on t1.liveId=t2.liveId
       """.stripMargin).drop("good_id").withColumnRenamed("good_ids","good_id").where("good_id!='-1'")

    source_data.createOrReplaceTempView("source_data")

    sqlContext.sql(
      s"""
         |select
         |*,
         |concat(concat(liveId,"+"),good_id) as liveAndgood_id
         |from source_data
       """.stripMargin).createOrReplaceTempView("mid")

    sqlContext.sql(
      s"""
         |select
         |t1.*,
         |t2.sellCount,
         |t2.priceText,
         |t2.salesAmount,
         |t2.firstCategoryId,
         |t2.secondCategoryId,
         |t2.thirdCategoryId,
         |t2.fourthCategoryId
         |from mid t1
         |inner join
         |zhibo_data t2
         |on t1.good_id=t2.good_id
       """.stripMargin).createOrReplaceTempView("value")

    import org.apache.spark.sql.functions._
    //打上是否是自主直播标签
    val result = sqlContext.sql(
      s"""
         |select
         |t1.*,
         |case
         |when t2.userId is null then false
         |else true
         |end as is_selfLive
         |from value t1
         |left join
         |zhibo_data t2
         |on t1.anchorId=t2.userId
       """.stripMargin).dropDuplicates("liveAndgood_id").withColumn("timeStamp",lit(tranTimeToLong(s"2019-${months}-30 00:00:00")))

    //result.repartition(4).write.orc(s"s3a://o2o-dataproces-group/panzonghao/tmall/${months}/zhibo_live_good")
    result.repartition(4).write.orc(s"s3a://o2o-dataproces-group/panzonghao/taobao/${months}/zhibo_live_good")
    null
  }

  ////////////////////////////////////////


  def tranTimeToLong(tm:String) :String={
    val fm = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
    val dt = fm.parse(tm)
    val aa = fm.format(dt)
    val tim: Long = dt.getTime()
    (tim/1000).toString
  }
}
