package a_o2odata_deal.utils.zhibo_qy

import java.text.SimpleDateFormat

import a_aa_amainpackage.a_o2odata_deal.config.config._
import org.apache.spark.sql.SQLContext

/**
  * @ Author: o2o-rd-0008
  * @ Date:   2020/2/11 15:42
  * @ Description: joins crawled live-room data with per-goods sales facts and
  *                writes the per-(live, good) association table for Tmall and Taobao.
  */
object handle_zhibo_live_good {

  /**
    * Entry point: builds the live-room / goods association table for both the
    * Tmall and the Taobao crawl of the current `years`/`months` partition.
    */
  def handle_live_good(sqlContext: SQLContext): Unit = {
    handle_tmall_live_good(sqlContext)
    handle_taobao_live_good(sqlContext)
  }

  /** Builds the Tmall live/goods association and writes it to S3. */
  def handle_tmall_live_good(sqlContext: SQLContext): Unit =
    handle_platform_live_good(sqlContext, "tmall")

  /** Builds the Taobao live/goods association and writes it to S3. */
  def handle_taobao_live_good(sqlContext: SQLContext): Unit =
    handle_platform_live_good(sqlContext, "taobao")

  /**
    * Shared pipeline for one platform (`"tmall"` or `"taobao"`). The two public
    * methods above were byte-for-byte duplicates except for the platform segment
    * of the zhibo_finally input path and the output path, so the common logic
    * lives here.
    *
    * Steps:
    *   1. join the crawled live-room id list with the per-room goods detail
    *      (1-to-many: one live room lists many goods);
    *   2. key each row by `liveId+good_id` for later de-duplication;
    *   3. enrich with sales/category fields from the month's `zhibo_finally` orc;
    *   4. flag "self live" anchors, first against zhibo_finally sellers, then
    *      (overriding) against the backed-up `all_userid` list;
    *   5. write to `.../<platform>/<years>/<months>/zhibo_live_good`.
    */
  private def handle_platform_live_good(sqlContext: SQLContext, platform: String): Unit = {
    // NOTE(review): both platforms read the *tmall* detail/id paths from config.
    // For the taobao run this looks like a copy-paste mistake — confirm whether
    // config defines taobao_live_detail_path / taobao_live_id_path and switch if so.
    val data_detail = sqlContext.read
      .json(s"${a_aa_amainpackage.a_o2odata_deal.config.config.tmall_live_detail_path}")
      .dropDuplicates("good_id", "liveId")
    val data_id = sqlContext.read
      .json(s"${a_aa_amainpackage.a_o2odata_deal.config.config.tmall_live_id_path}")
    //.where("cast(liveTime as long)<1575129600 and cast(liveTime as long)>=1572537600")

    // Per-good sales/category facts produced by an upstream job for this month.
    val zhibo_data = sqlContext.read
      .orc(s"s3a://o2o-dataproces-group/panzonghao/$platform/${years}/${months}/zhibo_finally")

    zhibo_data.createOrReplaceTempView("zhibo_data")
    data_id.createOrReplaceTempView("data_id")
    data_detail.createOrReplaceTempView("data_detail")

    // One live room lists many goods, so this inner join fans the id rows out to
    // one row per (liveId, good_id). The detail side's good_id wins; '-1' marks a
    // missing/placeholder good and is filtered away.
    val source_data = sqlContext.sql(
      s"""
         |select
         |t1.*,
         |t2.livePriceText,
         |t2.good_id as good_ids
         |from data_id t1
         |inner join
         |data_detail t2
         |on t1.liveId=t2.liveId
       """.stripMargin)
      .drop("good_id")
      .withColumnRenamed("good_ids", "good_id")
      .where("good_id!='-1'")

    source_data.createOrReplaceTempView("source_data")

    // Synthetic key "<liveId>+<good_id>", used below to de-duplicate the join fan-out.
    sqlContext.sql(
      s"""
         |select
         |*,
         |concat(concat(liveId,"+"),good_id) as liveAndgood_id
         |from source_data
       """.stripMargin).createOrReplaceTempView("mid")

    // Enrich each (live, good) pair with sales and category facts.
    sqlContext.sql(
      s"""
         |select
         |t1.*,
         |t2.sellCount,
         |t2.priceText,
         |t2.salesAmount,
         |t2.firstCategoryId,
         |t2.secondCategoryId,
         |t2.thirdCategoryId,
         |t2.fourthCategoryId
         |from mid t1
         |inner join
         |zhibo_data t2
         |on t1.good_id=t2.good_id
       """.stripMargin).createOrReplaceTempView("value")

    import org.apache.spark.sql.functions._
    // First self-live pass: an anchor is "self live" when it also appears as a
    // seller (userId) in zhibo_data. This boolean is recomputed below from the
    // all_userid backup and the column produced here is ultimately dropped.
    val result1 = sqlContext.sql(
      s"""
         |select
         |t1.*,
         |case
         |when t2.userId is null then false
         |else true
         |end as is_selfLive
         |from value t1
         |left join
         |zhibo_data t2
         |on t1.anchorId=t2.userId
       """.stripMargin)
      .dropDuplicates("liveAndgood_id")
      .withColumn("timeStamp", lit(timeStamp))

    // Authoritative seller-id list (shared tmall+taobao backup).
    sqlContext.read.json("s3a://o2o-dataproces-group/panzonghao/zhibo_zf/backups/all_userid/*/*")
      .dropDuplicates("userId").createOrReplaceTempView("all_data")
    result1.createOrReplaceTempView("platform_data")

    // Second self-live pass against all_data; anchor 69226163 is force-flagged as
    // NOT self-live. NOTE(review): the outer CASE mixes a boolean `false` with the
    // string 'true'/'false' column — presumably relying on Spark's implicit
    // boolean->string coercion; confirm the resulting column type downstream.
    //
    // BUG FIX: the original dropped "is_selfLivess" (the very column it was about
    // to rename) instead of the intermediate "is_selfLives", making the rename a
    // no-op — the written output carried "is_selfLives" and no "is_selfLive" at all.
    val result = sqlContext.sql(
      s"""
         |select
         |*,
         |case when anchorId='69226163' then false else is_selfLives end as is_selfLivess
         |from
         |(select
         |t1.*,
         |case when t2.userId is null then 'false' else 'true' end as is_selfLives
         |from platform_data t1
         |left join
         |all_data t2
         |on t1.anchorId=t2.userId)
         """.stripMargin)
      .drop("is_selfLive", "is_selfLives")
      .withColumnRenamed("is_selfLivess", "is_selfLive")

    result.repartition(4).write
      .orc(s"s3a://o2o-dataproces-group/panzonghao/$platform/${years}/${months}/zhibo_live_good")
  }

  /**
    * Parses a "yyyy-MM-dd HH:mm:ss" timestamp string into epoch seconds,
    * interpreting it in the JVM's default time zone (no zone in the pattern).
    *
    * @param tm timestamp string, e.g. "2020-02-11 15:42:00"
    * @return seconds since the Unix epoch
    * @throws java.text.ParseException if `tm` does not match the pattern
    */
  def tranTimeToLong(tm: String): Long = {
    // SimpleDateFormat is not thread-safe, so a fresh instance is built per call.
    // (Removed the original's unused re-format of the parsed date.)
    val fm = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
    fm.parse(tm).getTime / 1000 // milliseconds -> seconds
  }

}

// zhibo_live_good