package cn.doitedu.day07

import org.apache.spark.sql.SparkSession

object T19_SQLContinuedLogin {

  /**
   * Finds users who logged in on at least `days` consecutive days,
   * using the classic "gaps and islands" SQL technique:
   * within a run of consecutive dates, (dt - row_number) is constant,
   * so grouping by that difference isolates each streak.
   *
   * @param args optional: args(0) = minimum streak length (defaults to 3)
   */
  def main(args: Array[String]): Unit = {
    // Minimum number of consecutive login days; overridable from the CLI.
    val days = if (args.nonEmpty) args(0).toInt else 3

    val spark = SparkSession.builder()
      .appName(this.getClass.getSimpleName)
      .master("local[4]")
      .getOrCreate()

    try {
      // Build a DataFrame from the CSV; expects header columns uid, dt.
      val df = spark.read
        .option("header", "true")
        .csv("data/login2.txt")

      // createOrReplaceTempView: safe to re-run in a shared/reused session,
      // unlike createTempView which throws if the view already exists.
      df.createOrReplaceTempView("v_login")

      val res = spark.sql(
        s"""
          |select
          |  uid,
          |  min(dt) start_dt,
          |  max(dt) end_dt,
          |  count(1) counts
          |from
          |(
          |  select
          |    uid,
          |    dt,
          |    -- dt minus row number is constant within a consecutive run
          |    date_sub(dt, rn) date_dif
          |  from
          |  (
          |    select
          |      uid, -- user id
          |      dt,
          |      row_number() over(partition by uid order by dt) rn
          |    from
          |    (
          |      -- de-duplicate multiple logins on the same day
          |      select
          |        distinct uid, dt
          |      from
          |        v_login
          |    ) t1
          |  ) t2
          |) t3
          |group by
          |  uid, date_dif
          |having counts >= $days
          |""".stripMargin)

      res.show()
    } finally {
      // Always release the local Spark resources, even if the job fails.
      spark.stop()
    }
  }

}
