package com.shujia.spark.sql

import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * Computes, per customer id, statistics over runs of CONSECUTIVE trading days
 * in Hive table `db01.deal_tb` (columns: id, datestr, amount):
 * total amount, run length in days, first/last day of the run, and the gap
 * (in days) since the previous run. Results are written to Hive table `result`.
 *
 * Grouping trick: `date_sub(datestr, row_number() over (partition by id order by datestr))`
 * is constant within a stretch of consecutive dates, so it serves as the run key (`grp`).
 */
object Demo09Deal {
  def main(args: Array[String]): Unit = {
    // App name derives from the object name; the trailing "$" of the Scala
    // companion-object class name is stripped for readability.
    val spark: SparkSession = SparkSession
      .builder()
      .appName(this.getClass.getSimpleName.replace("$", ""))
      //      .master("local")
      //      .config("spark.sql.shuffle.partitions", "2")
      .enableHiveSupport()
      // Enable Hive support so `use db01`, `spark.table` and `insert overwrite`
      // resolve against the Hive metastore.
      .getOrCreate()

    // FIX: ensure the session is always stopped, even if the job fails midway;
    // previously the session was never stopped and driver resources leaked.
    try {
      spark.sql("use db01")

      // Pure-SQL version of the same analysis, kept for reference.
      // NOTE(review): its result DataFrame is discarded (the .show() is commented
      // out), so this statement only validates/plans the query — it performs no work.
      spark.sql(
        """
          |select  t1.id
          |        ,round(sum(t1.sum_amount),4) as sum_amount
          |        ,count(*) as sum_days
          |        ,min(t1.datestr) as start_date
          |        ,max(t1.datestr) as end_date
          |        ,nvl(datediff(min(t1.datestr),lag(max(t1.datestr),1) over (partition by t1.id order by min(t1.datestr))) - 1,0) as interval_days
          |from (
          |        select  id
          |                ,datestr
          |                ,sum(amount) as sum_amount
          |                ,date_sub(datestr,row_number() over (partition by id order by datestr)) as grp
          |        from deal_tb
          |        group by id,datestr
          |) t1 group by t1.id,t1.grp
          |""".stripMargin)
      //      .show()

      val dealDF: DataFrame = spark.table("deal_tb")
      import spark.implicits._
      import org.apache.spark.sql.functions._

      // DSL version of the query above.
      val resultDF: DataFrame = dealDF
        // One row per (id, day): sum multiple deals occurring on the same day.
        .groupBy($"id", $"datestr")
        .agg(sum($"amount") as "sum_amount")
        // Run key: date minus its rank is constant across consecutive dates.
        .withColumn("grp", date_sub($"datestr", row_number() over Window.partitionBy($"id").orderBy($"datestr")))
        // Aggregate each consecutive run.
        .groupBy($"id", $"grp")
        .agg(round(sum($"sum_amount"), 4) as "sum_amount"
          , count("*") as "sum_days"
          , min($"datestr") as "start_date"
          , max($"datestr") as "end_date"
        )
        // Days between this run's start and the previous run's end, minus 1;
        // the first run of each id has no predecessor, hence coalesce to 0.
        .withColumn("interval_days"
          , coalesce(datediff($"start_date"
            , lag($"end_date", 1) over Window.partitionBy($"id").orderBy($"start_date")
          ) - 1
            , expr("0")
          )
        )

      resultDF.createOrReplaceTempView("result_tb")

      // Overwrite the Hive output table with the DSL result.
      spark.sql(
        """
          |insert overwrite table result
          |select * from result_tb
          |""".stripMargin)
    } finally {
      spark.stop()
    }
  }

}
