package cn.lagou.test

import org.apache.spark.sql.functions.max
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Dataset, SparkSession}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.expressions.Window

/**
 * Demo job: given records of the form "<id> <startdate> <enddate>",
 * produce each distinct date paired with the next chronological date.
 *
 * Also demonstrates registering a CSV file as a temporary view and
 * re-exporting it.
 */
object StartEndDate {

  def main(args: Array[String]): Unit = {

    val spark = SparkSession
      .builder()
      .appName("Demo1")
      .master("local[*]")
      .getOrCreate()
    val sc = spark.sparkContext
    sc.setLogLevel("warn")
    import spark.implicits._

    // Register data/dates.csv as a temp view: space-delimited, no header,
    // schema inferred. NOTE(review): assumes the file exists relative to the
    // working directory — confirm before running elsewhere.
    spark.sql(
      """
        |CREATE OR REPLACE TEMPORARY VIEW dates
        | USING csv
        |options(path "data/dates.csv",
        | header "false",
        | delimiter " ",
        | inferschema "true")
        |""".stripMargin)
    spark.sql("select * from dates").show

    // Re-export the view as CSV, clobbering any previous run's output.
    spark.sql("select * from dates").write
      .format("csv")
      .mode("overwrite")
      .save("data/output")

    // In-memory sample data: "<id> <startdate> <enddate>" per record.
    val arr = Array("1 2019-03-04 2020-02-03", "2 2020-04-05 2020-08-04", "3 2019-10-09 2020-06-11")

    // Single-column DataFrame ("value") holding the raw lines.
    val df = arr.toList.toDF()

    // Frame covers the current row and the one after it; max over that frame
    // on an ascending sort yields the *next* date. The last date pairs with
    // itself because its frame contains only the current row.
    val w1 = Window.orderBy($"value".asc).rowsBetween(0, 1)
    df.as[String]
      .flatMap(_.split("\\s+").drop(1)) // drop the id, keep both dates
      .distinct()
      .sort($"value".asc)
      .withColumn("enddate", max("value").over(w1))
      .show()

    spark.close()
  }

}
