package Spark._sql.day01

import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}


/**
 * Demo: for each session, compute the first and last access time using the
 * FIRST_VALUE / LAST_VALUE window functions in Spark SQL.
 *
 * Input file format: one record per line, "session_id,access_time" (CSV-like).
 *
 * Note on LAST_VALUE: with only an ORDER BY, the default window frame is
 * RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW, which would make
 * LAST_VALUE return the current row's value. The explicit
 * ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING frame is required
 * to get the true last value of the partition.
 */
object FirstAndLastDome {

  def main(args: Array[String]): Unit = {
    // Allow the input path to be passed as the first program argument;
    // fall back to the original hard-coded demo path for compatibility.
    val inputPath: String =
      args.headOption.getOrElse("D:\\07spark\\spark-sql01\\sparksql-1(1)\\作业\\lf.txt")

    val session: SparkSession = SparkSession.builder()
      .appName(this.getClass.getSimpleName)
      .master("local[*]")
      .getOrCreate()

    try {
      import session.implicits._

      val lines: Dataset[String] = session.read.textFile(inputPath)

      // Parse "session_id,access_time" lines into a two-column DataFrame.
      val res2: DataFrame = lines.map(str => {
        val arrs: Array[String] = str.split(",")
        val sid: String = arrs(0)
        // Parse directly as Long: epoch-millisecond timestamps overflow Int,
        // and the original `.toInt` would throw / truncate for such values.
        val dt: Long = arrs(1).toLong

        (sid, dt)
      }).toDF("session_id", "access_time")

      res2.createTempView("fb_ss")

      // `val` — the result is never reassigned.
      val r = session.sql(
        """
          |SELECT
          |session_id,
          |FIRST_VALUE(access_time) OVER(PARTITION BY session_id ORDER BY access_time ASC) begin_time,
          |LAST_VALUE(access_time) OVER(PARTITION BY session_id ORDER BY access_time ASC ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) end_time
          |FROM fb_ss
        """.stripMargin)

      r.show()
    } finally {
      // Always release the SparkSession, even if reading/parsing fails.
      session.stop()
    }
  }

}
