package com.shujia.sql

import org.apache.spark.sql.expressions.{UserDefinedFunction, Window}
import org.apache.spark.sql.{DataFrame, SparkSession}

import java.text.SimpleDateFormat
import java.util.Date

object Demo07Deal {

  /**
   * Spark job: for each user, find runs of consecutive trading days and report,
   * per run, the total amount, number of days, start/end dates, and the gap (in
   * days) since the user's previous run.
   *
   * Technique: within each user, rank days by date (row_number), then subtract
   * the rank from the date. All days in one consecutive run collapse to the
   * same "grp" date, which becomes the grouping key for the run.
   */
  def main(args: Array[String]): Unit = {

    val spark: SparkSession = SparkSession
      .builder()
      .master("local")
      .appName(this.getClass.getSimpleName.replace("$", ""))
      .config("spark.sql.shuffle.partitions", "2")
      .getOrCreate()

    import spark.implicits._
    import org.apache.spark.sql.functions._

    // Raw deal records: one row per transaction (user id, ISO date string, amount).
    val dealDF: DataFrame = spark
      .read
      .format("csv")
      .option("sep", ",")
      //      .option("header", "true")
      .schema("id String,datestr String,amount Double")
      .load("spark/data/deal.csv")
    dealDF.printSchema()

    dealDF.show()

    // UDF: subtract `rn` days from a "yyyy-MM-dd" date string.
    //
    // Implemented with java.time.LocalDate rather than SimpleDateFormat
    // millisecond arithmetic, for two reasons:
    //  - the previous `rn * 60 * 60 * 24 * 1000` was Int arithmetic and
    //    overflows Int.MaxValue once rn >= 25 (25 * 86_400_000 > 2^31 - 1),
    //    silently producing wrong dates for users with 25+ trading days;
    //  - fixed 24h-in-millis subtraction is timezone-dependent and shifts by a
    //    calendar day across DST transitions, while LocalDate.minusDays is
    //    calendar-correct and timezone-independent.
    val myDateSubUDF: UserDefinedFunction = udf((dateStr: String, rn: Int) => {
      import java.time.LocalDate
      // LocalDate.toString renders ISO "yyyy-MM-dd", matching the input format.
      LocalDate.parse(dateStr).minusDays(rn).toString
    })

    // Collapse multiple transactions into one row per user per day.
    val sumDealDF: DataFrame = dealDF
      .groupBy($"id", $"datestr")
      .agg(sum($"amount") as "sum_amount")

    // Rank each user's days chronologically (rn), then derive the run key.
    sumDealDF
      .withColumn("rn", row_number() over Window.partitionBy($"id").orderBy($"datestr"))
      // date - rn is constant within a run of consecutive dates => run key "grp".
      //      .withColumn("grp", from_unixtime(unix_timestamp($"datestr", "yyyy-MM-dd") - $"rn" * 60 * 60 * 24, "yyyy-MM-dd"))
      // Built-in date_sub cannot take a column as the day count, hence the UDF.
      .withColumn("grp", myDateSubUDF($"datestr", $"rn"))
      // Aggregate each consecutive run: total amount, length, and date span.
      .groupBy($"id", $"grp")
      .agg(round(sum($"sum_amount")) as "total_amount"
        , count("*") as "total_days"
        , min($"datestr") as "start_date"
        , max($"datestr") as "end_date"
      )
      // End date of the user's previous run (null for the first run).
      .withColumn("last_end_date", lag($"end_date", 1) over Window.partitionBy($"id").orderBy($"grp"))
      // Gap between runs: days from previous run's end to this run's start,
      // minus 1 so back-to-back-but-broken streaks of 1 day apart yield 1, etc.
      .withColumn("interval_days", datediff($"start_date", $"last_end_date") - 1)
      .select(
        $"id"
        , $"total_amount"
        , $"total_days"
        , $"start_date"
        , $"end_date"
        // First run per user has no predecessor; report a gap of 0, not null.
        , coalesce($"interval_days", expr("0")) as "interval_days"
      ).show()
  }

}
