package com.zhao.mianshi

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

/**
 * Description: <br/>
 * Copyright (c) 2020, 赵 <br/>
 * This program is protected by copyright laws. <br/>
 * Date： 2020/11/16 15:48
 * 需求:编写连续 7 天登录的总人数
 * 数据:
 * uid dt login_status
 * 1 2019-07-11 1
 * 1 2019-07-12 1
 * 1 2019-07-13 1
 * 1 2019-07-14 1
 * 1 2019-07-15 1
 * 1 2019-07-16 1
 * 1 2019-07-17 1
 * 1 2019-07-18 1
 * 2 2019-07-11 1
 * 2 2019-07-12 1
 * 2 2019-07-13 0
 * 2 2019-07-14 1
 * 2 2019-07-15 1
 * 2 2019-07-16 0
 * 2 2019-07-17 1
 * 2 2019-07-18 0
 * 3 2019-07-11 1
 * 3 2019-07-12 1
 * 3 2019-07-13 1
 * 3 2019-07-14 1
 * 3 2019-07-15 1
 * 3 2019-07-16 1
 * 3 2019-07-17 1
 * 3 2019-07-18 1
 *
 * @author 柒柒
 * @version : 1.0
 */

/**
 * Computes the total number of users who logged in on at least 7
 * consecutive days (the requirement stated in the file header).
 *
 * Technique ("gaps and islands"): number each user's login dates with
 * row_number() ordered by date. For a run of consecutive days, the
 * difference `date_sub(dt, rm)` is constant, so it serves as a streak
 * anchor. Grouping by (uid, anchor) and keeping groups of size >= 7
 * identifies qualifying streaks; counting DISTINCT uids over those
 * groups yields the number of users (distinct also guards against a
 * user having more than one qualifying streak).
 */
object test3 {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf()
      .setAppName(this.getClass.getName)
      .setMaster("local[*]")

    // Hive support is disabled by default; enable it so the Hive
    // database `mianshi` and table `mianshi3` are queryable.
    val spark: SparkSession = SparkSession.builder().config(conf)
      .enableHiveSupport()
      .getOrCreate()

    try {
      spark.sql("use mianshi")

      // NOTE(review): the original query returned raw (uid, anchor) group
      // rows; the requirement asks for the TOTAL user count, hence the
      // outer count(distinct uid).
      spark.sql(
        """
          |select count(distinct uid) total_users
          |from
          |(select uid
          | from
          | (select uid,
          |         dt,
          |         row_number() over(distribute by uid sort by dt) rm
          |    from mianshi3
          |   where login_status = 1) t1
          | group by uid, date_sub(dt, rm)
          | having count(*) >= 7) t2
          |""".stripMargin).show()
    } finally {
      // The original leaked the session; always release cluster resources.
      spark.stop()
    }
  }
}
