package chapter14

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.expressions.Window

/**
 * @author: 余辉  
 * @blog: https://blog.csdn.net/silentwolfyh
 * @date: 2024 - 08 - 31 9:28 下午
 * @description:
 * 需求：
 * 1、找出连续登录3天及以上的用户（即 cs > 2，结果中包含恰好连续3天的用户）
 *
 * 步骤：
 * 1、开窗，按照uid分区，按照dt排序，标记rn
 * 2、然后用date_sub函数，用dt减去rn，标记为dis
 * 3、使用uid和dis分组，标记count为cs
 * 4、where cs > 2
 * */
object LoginDSL {

  /**
   * Entry point: finds users with 3 or more consecutive login days
   * using the DataFrame DSL (window function + date_sub grouping trick).
   *
   * @param args optional; args(0) may override the input CSV path
   */
  def main(args: Array[String]): Unit = {

    val spark: SparkSession = SparkSession
      .builder()
      .appName("LoginDSL") // was "": give the job a meaningful name in the UI
      .master("local[*]")
      .getOrCreate()

    import org.apache.spark.sql.functions._
    import spark.implicits._

    // Allow the input path to be overridden from the command line;
    // defaults to the original sample data set.
    val inputPath =
      if (args.nonEmpty) args(0) else "BookData/exercise/用户登录/loginUser.csv"

    val df = spark.read
      .options(Map("header" -> "true", "inferSchema" -> "true"))
      .csv(inputPath)

    df.printSchema()
    df.show()

    /**
     * Note: `date_sub('dt, 'rn)` does not compile — the DSL overload of
     * date_sub takes an Int literal as its second argument, not a column:
     *   Error:(29, 40) type mismatch;
     *   found   : Symbol
     *   required: Int
     * so the SQL-expression form `expr("date_sub(dt,rn)")` is used instead.
     */
    val win = Window.partitionBy('uid).orderBy('dt)
    df.select('uid, date_format('dt, "yyyy-MM-dd") as "dt", row_number() over (win) as "rn")
      // dt - rn is constant within one run of consecutive days, so it
      // identifies each login streak ("dis") per user.
      .select('uid, 'dt, expr("date_sub(dt,rn)") as "dis")
      .groupBy('uid, 'dis).agg(min('dt), max('dt), count('uid) as "cs")
      // keep streaks of at least 3 consecutive days
      .where("cs > 2").drop('dis)
      .show()

    /**
     * Expected result:
     * +------+----------+----------+---+
     * |   uid|   min(dt)|   max(dt)| cs|
     * +------+----------+----------+---+
     * |guid01|2024-03-04|2024-03-07|  4|
     * |guid02|2024-03-01|2024-03-03|  3|
     * +------+----------+----------+---+
     */

    // Release Spark resources before exiting.
    spark.stop()
  }

}
