package sql

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.types.{DataTypes, StructType}
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}
import util.YieeDateUtils

/**
 * User activity-retention analysis report, target table ADS_APL_ART_REC.
 * Source table: user contiguous-active-range record table DWS_APL_UCA_RNG.
 *
 * Target table DDL:
 * CREATE TABLE ADS_APL_ART_REC(
 *   dt       string, -- calculation date
 *   act_day  string, -- active date
 *   rt_days  int,    -- number of days retained
 *   rt_users int     -- number of retained users
 * )
 * stored as parquet;
 */
object ADS_APL_ART_REC {
  /**
   * Entry point.
   *
   * @param args optional; args(0) overrides the calculation date
   *             (format yyyy-MM-dd, defaults to "2020-03-14").
   */
  def main(args: Array[String]): Unit = {
    // Calculation date; previously hard-coded in four places below.
    val calcDt: String = if (args.nonEmpty) args(0) else "2020-03-14"

    val spark: SparkSession = SparkSession.builder()
      .appName(this.getClass.getSimpleName)
      .master("local[4]")
      .getOrCreate()
    import spark.implicits._

    // Schema of DWS_APL_UCA_RNG: one row per contiguous-active range per user.
    val schema: StructType = new StructType()
      .add("guid", DataTypes.LongType)
      .add("first_dt", DataTypes.StringType)
      .add("rng_start", DataTypes.StringType)
      .add("rng_end", DataTypes.StringType)
    val frame: DataFrame = spark.read.schema(schema).csv("data/active_range/demo.dat")

    // Keep only still-open ranges ('9999-12-31' is the open-end sentinel)
    // that started strictly before the calculation date.
    val rng: Dataset[Row] = frame.where(s"rng_end = '9999-12-31' and rng_start < '$calcDt'")
    rng.show()

    // Explode each open range into one (guid, act_day) row per active day,
    // from rng_start up to (but excluding) calcDt.
    val act_dt: DataFrame = rng.flatMap(row => {
      val guid = row.getAs[Long]("guid")
      val rng_start = row.getAs[String]("rng_start")
      val diff: Long = YieeDateUtils.dateDiff(rng_start, calcDt)
      // BUG FIX: the original yielded the string literal "guid" instead of the
      // guid value, collapsing every user into one key and breaking the counts.
      for (i <- 0 until diff.toInt) yield (guid, YieeDateUtils.dateAdd(rng_start, i))
    }).toDF("guid", "act_day")

    // Aggregate: for each active day, count users still active on calcDt;
    // rt_days is the distance between calcDt and the active day.
    act_dt.createTempView("act")
    val res: DataFrame = spark.sql(
      s"""
         |select
         |'$calcDt' as dt,
         |act_day,
         |datediff('$calcDt', act_day) as rt_days,
         |count(1) as rt_users
         |from act
         |group by act_day
         |""".stripMargin)
    res.show()

    // TODO: persist the result into the Hive table ADS_APL_ART_REC.
    spark.close()

  }
}
