package sql

import java.text.SimpleDateFormat
import java.time.LocalDate
import java.time.temporal.ChronoUnit
import java.util.Date

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.types.{DataTypes, StructType}
import org.apache.spark.sql.{DataFrame, SparkSession}

import scala.collection.immutable

/**
 * DWS_APL_ITV_AGU: visit-interval distribution aggregated by user (guid).
 *
 * Reads the per-day active-range table, clamps each range to the most recent
 * 30 days, and counts — per user — how many visit intervals of each gap
 * length (in days) occurred.
 */
object DWS_APL_ITV_AGU {

  /** Reference ("as of") date used when none is supplied on the command line. */
  private val DefaultStatDate = "2020-03-14"

  def main(args: Array[String]): Unit = {
    // The statistics date is parameterized: pass it as args(0) in "yyyy-MM-dd"
    // form; the previously hard-coded value remains the default, so existing
    // invocations keep their old behavior.
    val statDate: String = args.headOption.getOrElse(DefaultStatDate)

    val spark: SparkSession = SparkSession.builder()
      .master("local[4]")
      .appName(this.getClass.getSimpleName)
      .getOrCreate()
    import spark.implicits._

    // Read the day's active-range records. In production this would come from
    // Hive instead of the local demo file:
    //   spark.read.table("doit12.dws_apl_uca_rng").where(s"dt='$statDate'")
    val schema: StructType = new StructType().add("guid", DataTypes.LongType)
      .add("first_dt", DataTypes.StringType)
      .add("rng_start", DataTypes.StringType)
      .add("rng_end", DataTypes.StringType)
    val rng: DataFrame = spark.read.schema(schema).csv("E:\\doit12_yiee\\data\\active_range\\demo.dat")
    rng.createTempView("rng")

    // Constrain the data to the most recent 30 days: clamp range starts older
    // than 30 days, and replace the open-ended sentinel "9999-12-31" with the
    // stat date so that day-difference arithmetic is possible. Ranges that
    // ended more than 30 days ago are dropped entirely.
    val rng2: DataFrame = spark.sql(
      s"""
        |select
        |guid,
        |if(datediff("$statDate", rng_start) > 30, date_sub("$statDate",30), rng_start) as rng_start,
        |if(rng_end="9999-12-31", "$statDate", rng_end) as rng_end
        |from rng
        |where datediff("$statDate", rng_end) <= 30
        |""".stripMargin)

    // Re-shape rows as (guid, (rng_start, rng_end)) pairs for grouping.
    val rdd: RDD[(Long, (String, String))] = rng2.rdd.map(row => {
      val guid = row.getAs[Long]("guid")
      val rng_start = row.getAs[String]("rng_start")
      val rng_end = row.getAs[String]("rng_end")
      (guid, (rng_start, rng_end))
    })

    // Group all ranges per user ==> (guid, Iterable[(start, end), ...])
    val rdd2: RDD[(Long, Iterable[(String, String)])] = rdd.groupByKey()

    // Expand each user's ranges into (guid, intervalDays, count) triples.
    val rdd3: RDD[(Long, Int, Int)] = rdd2.flatMap(tp => {
      val guid: Long = tp._1
      val rngs: List[(String, String)] = tp._2.toList

      // 0-day intervals: a contiguous active range spanning d days contains d
      // zero-gap transitions, so emit one triple carrying that count directly.
      val itr0Day: List[(Long, Int, Int)] = rngs.map(rng => (guid, 0, date_diff(rng._1, rng._2).toInt))

      // X-day intervals: sort ranges by start date, then measure the distance
      // from each range's end to the next range's start; each such gap counts
      // once.
      val sorted: List[(String, String)] = rngs.sortBy(_._1)
      val itrXDay: immutable.Seq[(Long, Int, Int)] =
        for (i <- 0 until sorted.length - 1)
          yield (guid, date_diff(sorted(i)._2, sorted(i + 1)._1).toInt, 1)

      itr0Day ++ itrXDay
    })

    // Aggregate: total occurrences of each interval length per user.
    val frame: DataFrame = rdd3.toDF("guid", "itv_days", "counts")
    val frame1: DataFrame = frame.groupBy("guid", "itv_days").sum("counts")
    frame1.show()

    // TODO persist the result back to Hive.

    spark.close()
  }

  /**
   * Number of whole days from `day1` to `day2`, both in "yyyy-MM-dd" form
   * (ISO local date, which `LocalDate.parse` accepts without a formatter).
   *
   * Replaces the previous SimpleDateFormat + epoch-millis division: LocalDate
   * is immutable and thread-safe, avoids allocating a formatter per call, and
   * the millis division was wrong across DST transitions (a 23-hour day
   * truncated to 0 extra days).
   *
   * @param day1 earlier date, "yyyy-MM-dd"
   * @param day2 later date, "yyyy-MM-dd"
   * @return signed day count from day1 to day2
   */
  def date_diff(day1: String, day2: String): Long =
    ChronoUnit.DAYS.between(LocalDate.parse(day1), LocalDate.parse(day2))
}
