package com.shujia.sql

import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.{DataFrame, SparkSession}

object Demo08SparkOnHive {

  /**
   * For each user in Hive table `default.deal_tb(id, datestr, amount)`,
   * finds every run of consecutive trading days and reports the run's
   * total amount, day count, start/end dates, and the gap (in days) to
   * the previous run — implemented twice: once in Spark SQL and once
   * with the equivalent DataFrame API, so the two outputs can be compared.
   */
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession
      .builder()
      .appName("Demo08SparkOnHive")
      .master("local")
      .config("spark.sql.shuffle.partitions", "2")
      // Hive support is required so spark.table can resolve the metastore table.
      .enableHiveSupport()
      .getOrCreate()
    import spark.implicits._
    import org.apache.spark.sql.functions._

    // NOTE(review): assumes Hive table default.deal_tb exists with columns
    // id, datestr (yyyy-MM-dd), amount — confirm against the warehouse.
    val dealDF: DataFrame = spark.table("default.deal_tb")
    dealDF.show()

    spark.sql("use default")

    // SQL version. Consecutive-day runs are detected with the classic
    // "date minus row_number" trick: date_sub(datestr, rn) is constant
    // within a run, so (id, grp) identifies one run.
    // FIX: interval_days was NULL for each user's first run; coalesce to 0
    // so the SQL output matches the DataFrame version below.
    spark.sql(
      """
        |select  t1.id
        |        ,t1.grp
        |        ,round(sum(t1.sum_amount),3) as total_amount -- 连续交易总额
        |        ,count(1) as total_days -- 连续登录天数
        |        ,min(datestr) as start_date -- 连续登录开始的时间
        |        ,max(datestr) as end_date -- 连续登录结束的时间
        |        ,coalesce(datediff(t1.grp,lag(t1.grp,1) over(partition by t1.id order by t1.grp)),0) as interval_days -- 间隔天数
        |from (
        |    select  id
        |            ,datestr
        |            ,round(sum(amount),3) as sum_amount
        |            ,date_sub(datestr,row_number() over(partition by id order by datestr)) as grp
        |    from deal_tb
        |    group by id,datestr
        |) t1 group by t1.id,t1.grp
        |""".stripMargin)
      .show()

    // DataFrame version — must produce the same numbers as the SQL above.
    dealDF
      // One row per user per day, with that day's total amount.
      .groupBy($"id", $"datestr")
      .agg(sum($"amount") as "sum_amount")
      // Rank each user's trading dates in chronological order.
      .withColumn("rn", row_number() over Window.partitionBy($"id").orderBy($"datestr"))
      // datestr minus rn (in days) is constant within a run of consecutive
      // dates, so grp labels each user's runs.
      .withColumn("grp", from_unixtime(unix_timestamp($"datestr", "yyyy-MM-dd") - $"rn" * 60 * 60 * 24, "yyyy-MM-dd"))
      // Aggregate each run.
      .groupBy($"id", $"grp")
      // FIX: round to 3 decimals, matching round(...,3) in the SQL version
      // (the original rounded to 0 decimals, giving different totals).
      .agg(round(sum($"sum_amount"), 3) as "total_amount"
        , count("*") as "total_days"
        , min($"datestr") as "start_date"
        , max($"datestr") as "end_date"
      )
      // End date of the user's previous run (NULL for the first run).
      .withColumn("last_end_date", lag($"end_date", 1) over Window.partitionBy($"id").orderBy($"grp"))
      // Days strictly between two runs: start minus previous end, minus one.
      .withColumn("interval_days", datediff($"start_date", $"last_end_date") - 1)
      .select(
        $"id"
        , $"total_amount"
        , $"total_days"
        , $"start_date"
        , $"end_date"
        // First run has no predecessor — report a gap of 0, as in the SQL.
        , coalesce($"interval_days", lit(0)) as "interval_days"
      ).show()

    // Release the local Spark context before the JVM exits.
    spark.stop()
  }

}
