package chapter14

import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * @author: 余辉  
 * @blog: https://blog.csdn.net/silentwolfyh
 * @date: 2024 - 08 - 31 9:28 下午
 * @description:
 * 需求：
 * 1、找出连续登录超过3天的用户
 *
 * 步骤：
 * 1、开窗，按照uid分区，按照dt排序，标记rn
 * 2、然后用date_sub函数，用dt减去rn，标记为dis
 * 3、使用uid和dis分组，统计count(1)并标记为cts
 * 4、having cts > 2（即连续登录3天及以上）
 * */
object LoginSQL {

  /**
   * Entry point: finds users who logged in on 3 or more consecutive days.
   *
   * Approach (classic "gaps and islands"):
   *   1. row_number() over (partition by uid order by dt) -> rn
   *   2. date_sub(dt, rn) -> dis; consecutive dates collapse to the same dis
   *   3. group by (uid, dis) and count the streak length -> cts
   *   4. having cts > 2 keeps streaks of 3+ days
   */
  def main(args: Array[String]): Unit = {

    val spark: SparkSession = SparkSession
      .builder()
      // Give the job a meaningful name so it is identifiable in the Spark UI/logs.
      .appName("LoginSQL")
      .master("local[*]")
      .getOrCreate()

    // CSV is expected to have a header row with columns: uid, dt.
    // NOTE(review): columns are read as strings; date_sub relies on Spark's
    // implicit cast of dt to DateType — dt must be in yyyy-MM-dd format.
    val frame: DataFrame = spark.read
      .option("header", true)
      .csv("doc/exercise/用户登录/loginUser.csv")

    frame.printSchema()
    frame.show()

    frame.createTempView("login")

    spark.sql(
      """
        |select
        |uid , min(dt) , max(dt) ,count(1) as cts
        |from
        |(
        |	select
        |		uid , dt ,date_sub(dt,rn) as dis
        |	from
        |	(
        |		select
        |				uid , dt , row_number() over(partition by uid order by dt) rn
        |		from login
        |	) t2
        |) t3
        |group by uid , dis
        |having cts > 2
        |""".stripMargin).show()

    /** *
     * Expected result (column name matches the `cts` alias in the query):
     * +------+----------+----------+---+
     * |   uid|   min(dt)|   max(dt)|cts|
     * +------+----------+----------+---+
     * |guid01|2024-03-04|2024-03-07|  4|
     * |guid02|2024-03-01|2024-03-03|  3|
     * +------+----------+----------+---+
     */

    // Release the SparkSession and its underlying SparkContext.
    spark.stop()
  }

}
