package cn.doitedu.dw.sql

import java.text.SimpleDateFormat
import java.time.LocalDate
import java.time.temporal.ChronoUnit

import org.apache.commons.lang3.time.DateUtils
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.sum
import org.apache.spark.sql.types.{DataTypes, StructType}

import scala.collection.immutable
import scala.collection.mutable.ListBuffer

/**
 * 计算目标： 访问间隔分布用户聚合表 DWS_APL_ITV_AGU
 * 截止日期       guid    间隔天数    发生次数
 * 2020-03-12     1        0         10
 * 2020-03-12     1        1         2
 * 2020-03-12     1        2         4
 * ......
 *
 * 计算源表：  用户连续活跃区间记录表  DWS_APL_UCA_RNG
 */
object DWS_APL_ITV_AGU {

  /** Size, in days, of the rolling window the activity ranges are truncated to. */
  private val WindowDays = 30

  /**
   * Builds the "visit-interval distribution per user" aggregate (DWS_APL_ITV_AGU)
   * from the continuous-activity-range table (DWS_APL_UCA_RNG).
   *
   * @param args optional first argument is the cutoff date ("yyyy-MM-dd");
   *             defaults to "2020-03-14" for backward compatibility with the
   *             previously hard-coded value.
   */
  def main(args: Array[String]): Unit = {

    // Cutoff date ("today" for this run); generalized from the former hard-coded literal.
    val cutoffDt = args.headOption.getOrElse("2020-03-14")

    val spark = SparkSession.builder()
      .appName(this.getClass.getSimpleName)
      .master("local")
      .enableHiveSupport()
      .getOrCreate()

    import spark.implicits._

    // TODO read the current activity-range records from hive instead of the demo file
    //val rng = spark.read.table("doit13.DWS_APL_UCA_RNG").where(s"dt='$cutoffDt'")

    val schema = new StructType()
      .add("guid", DataTypes.LongType)
      .add("first_dt", DataTypes.StringType)
      .add("rng_start", DataTypes.StringType)
      .add("rng_end", DataTypes.StringType)
    val rng = spark.read.schema(schema).csv("data/interval_demodata/demo.dat")
    /**
     * 1,2020-05-20,2020-01-01,2020-01-12
     * 1,2020-05-20,2020-02-09,2020-03-11
     * 1,2020-05-20,2020-03-13,9999-12-31
     */

    // Truncate the ranges to the last `WindowDays` days, and replace the
    // open-ended sentinel '9999-12-31' with the cutoff date so differences
    // can be computed.  Note: `datediff` inside the SQL is Spark's builtin,
    // not the local helper below.
    // createOrReplaceTempView instead of createTempView: re-running in the
    // same session must not throw "view already exists".
    rng.createOrReplaceTempView("rng")
    val rng2 = spark.sql(
      s"""
        |select
        |guid,
        |if(datediff('$cutoffDt',rng_start)>$WindowDays,date_sub('$cutoffDt',$WindowDays),rng_start) as rng_start,
        |if(rng_end='9999-12-31','$cutoffDt',rng_end) as rng_end
        |from rng
        |where datediff('$cutoffDt',rng_end)<=$WindowDays
        |""".stripMargin)
    rng2.show(100, false)
    /**
     * +----+----------+------------+
     * |guid|rng_start |rng_end     |
     * +----+----------+------------+
     * |1   |2020-02-13|2020-03-11  |
     * |1   |2020-03-13|2020-03-14  |
     */

    // Count 0-day gaps (length of each range) and x-day gaps (one occurrence each).
    val rdd = rng2.rdd.map(row => {
      val guid = row.getAs[Long]("guid")
      val rng_start = row.getAs[String]("rng_start")
      val rng_end = row.getAs[String]("rng_end")
      (guid, (rng_start, rng_end))
    })

    // Group by guid  ==> (1, Iterator[(2020-02-13,2020-03-11),(2020-03-13,2020-03-14)])
    val rdd2 = rdd.groupByKey()

    // For each user, turn each range into rows of (guid, gap of x days, y occurrences).
    val rdd3 = rdd2.flatMap(tp => {
      val guid = tp._1
      val rngs: List[(String, String)] = tp._2.toList

      // 0-day gaps: each range of n+1 consecutive days contributes n occurrences.
      val itr0Day: List[(Long, Int, Int)] = rngs.map(rng => (guid, 0, datediff(rng._1, rng._2).toInt))

      // x-day gaps: sort the ranges, then diff each range's end against the
      // next range's start — each such gap occurred exactly once.
      val sortedRngs = rngs.sortBy(rng => rng._1)
      val itrxDay: immutable.Seq[(Long, Int, Int)] =
        for (i <- 0 until sortedRngs.size - 1)
          yield (guid, datediff(sortedRngs(i)._2, sortedRngs(i + 1)._1).toInt, 1)

      itr0Day ++ itrxDay
    })

    //rdd3.take(100).foreach(println)
    /**
     * (1,0,27)
     * (1,0,1)
     * (1,2,1)
     */

    // BUGFIX: `res` used to be assigned the Unit result of .show(), so the
    // DataFrame could never be saved.  Keep the aggregate and display it
    // separately; alias the sum so the sink column is named "cnts", not "sum(cnts)".
    val res = rdd3.toDF("guid", "itv_days", "cnts")
      .groupBy("guid", "itv_days")
      .agg(sum("cnts").as("cnts"))
    res.show(100, false)

    // TODO persist `res` to hive

    spark.close()

  }

  /**
   * Whole-day difference between two "yyyy-MM-dd" dates: dt2 - dt1.
   *
   * Rewritten with java.time: SimpleDateFormat is not thread-safe (unsafe if
   * this helper ever runs inside Spark executor tasks) and dividing epoch
   * milliseconds by 24h miscounts across DST transitions.  LocalDate.parse
   * accepts ISO "yyyy-MM-dd" directly.
   *
   * @param dt1 earlier date, ISO format
   * @param dt2 later date, ISO format
   * @return number of days from dt1 to dt2 (negative if dt2 precedes dt1)
   */
  def datediff(dt1: String, dt2: String): Long = {
    ChronoUnit.DAYS.between(LocalDate.parse(dt1), LocalDate.parse(dt2))
  }

}
