package com.sy.nlp

import org.apache.spark.sql.SparkSession

import java.text.SimpleDateFormat
import scala.collection.mutable

object WindowTest {

  /** Parses a timestamp string of the form "yyyy-MM-dd HH:mm:ss" into epoch
    * milliseconds.
    *
    * A fresh SimpleDateFormat is created per call because the class is not
    * thread-safe (the UDF below may run on multiple executor threads).
    * Parsing uses the JVM default time zone — NOTE(review): confirm the input
    * times are meant to be interpreted in the local zone.
    *
    * @param string timestamp in "yyyy-MM-dd HH:mm:ss" form
    * @return milliseconds since the Unix epoch
    * @throws java.text.ParseException if the string does not match the pattern
    */
  def dateToTimestamp(string: String): Long = {
    val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
    sdf.parse(string).getTime
  }

  /** Entry point. An explicit `main` replaces `extends App` to avoid the
    * App trait's delayed-initialization pitfalls.
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName("test")
      .master("local[*]")
      .getOrCreate()

    spark.sparkContext.setLogLevel("WARN")

    // Register the JSON input as a temp view so it can be queried with SQL.
    spark.read.json("a.json")
      .createOrReplaceTempView("a")
    //    .groupBy($"index", window($"time", "30 minutes"))
    //    .sum("index")
    //    .show()

    // UDF: millisecond gaps between consecutive timestamps in a list.
    // sliding(2) yields nothing for an empty list and a single short window
    // for a one-element list (whose gap is 0), so no explicit guard is needed.
    spark.udf.register("array_sub", (list: mutable.WrappedArray[String]) => {
      list
        .sliding(2)
        .map(x => dateToTimestamp(x.last) - dateToTimestamp(x.head))
        .toList
    })

    // NOTE(review): collect_list gives no ordering guarantee, so the gaps
    // computed by array_sub may not follow chronological order — consider
    // sorting the collected timestamps before differencing; verify intent.
    spark.sql("select index,collect_list(time),array_sub(collect_list(time)),count(1) from a group by index,window(time,'30 minutes') ")
      .rdd
      .foreach(row => {
        val value0 = row.get(0)
        val value1 = row.get(1)
        val value2 = row.get(2)
        // s-interpolation replaces the deprecated Any + String concatenation;
        // output is identical.
        println(s"$value0:$value1-$value2")
      })
    //    .show()

    // Release local Spark resources before the JVM exits.
    spark.stop()
  }
}


