package cn.doitedu.day07

import org.apache.spark.sql.SparkSession

object T22_SQLFlowCount {

  /**
   * Sessionizes per-user flow records with Spark SQL.
   *
   * Consecutive records of the same user are merged into one session when the
   * gap between the current record's start_time and the previous record's
   * end_time is at most `sessionGapSeconds`; each session is then aggregated
   * to (uid, earliest start_time, latest end_time, total flow).
   *
   * Expects a pipe-delimited CSV with a header row at data/flow2.txt
   * containing at least the columns: uid, start_time, end_time, flow.
   */
  def main(args: Array[String]): Unit = {
    // Gap threshold in seconds: a larger gap starts a new session.
    val sessionGapSeconds = 600

    val spark = SparkSession.builder().appName(this.getClass.getSimpleName)
      .master("local[4]")
      .getOrCreate()

    // Build the DataFrame from a pipe-delimited CSV with a header row.
    // All columns are read as strings; sum(flow) below relies on Spark's
    // implicit numeric cast during aggregation.
    val df = spark.read
      .option("header", "true")
      .option("delimiter", "|")
      .csv("data/flow2.txt")

    // createOrReplaceTempView avoids TempTableAlreadyExistsException when the
    // view name is already registered in this session (e.g. on re-run).
    df.createOrReplaceTempView("v_flow")

    // Layered query, inside out:
    //   t1: lag(end_time) gives each row the previous record's end_time
    //       (defaulting to the row's own start_time for the first record,
    //       which yields gap 0 and therefore flag 0).
    //   t2: flag = 1 when the gap exceeds the threshold (new session starts).
    //   t3: running sum of flag per user = session id (sum_flag).
    //   outer: aggregate each (uid, session) to its time span and total flow.
    val res = spark.sql(
      s"""
        |select
        |    uid,
        |    min(start_time) start_time,
        |    max(end_time) end_time,
        |    sum(flow) flow
        |from
        |(
        |    select
        |        uid,
        |        start_time,
        |        end_time,
        |        flow,
        |        sum(flag) over (partition by uid order by start_time) sum_flag
        |    from
        |    (
        |        select
        |            uid,
        |            start_time,
        |            end_time,
        |            flow,
        |            if(to_unix_timestamp(start_time) - to_unix_timestamp(lag_time) > ${sessionGapSeconds}, 1, 0) flag
        |        from
        |        (
        |            select
        |                uid,
        |                start_time,
        |                end_time,
        |                flow,
        |                lag(end_time, 1, start_time) over(partition by uid order by start_time) lag_time
        |            from
        |                v_flow
        |        ) t1
        |    ) t2
        |) t3
        |group by uid, sum_flag
        |""".stripMargin)

    res.show()

    // Release the SparkContext and its resources.
    spark.stop()
  }

}
