package cn.darksoul3.spark.cases

import org.apache.spark.{SparkConf, SparkContext}

import java.time.LocalDate
import java.time.format.DateTimeFormatter
import java.util.Locale

object Demo {

  /**
   * Finds users who logged in on at least 3 consecutive days.
   *
   * Input (args(0)): CSV lines of the form "uid,yyyy/MM/d".
   * Output: text directory "out.txt" containing tuples
   * (uid, streakLength, firstDay, lastDay).
   *
   * Algorithm: for each user, sort the distinct login dates chronologically;
   * subtracting each date's 0-based rank from the date itself yields the same
   * "anchor" date for every day of one consecutive run, so grouping by
   * (uid, anchor) collects exactly the streaks.
   */
  def main(args: Array[String]): Unit = {
    require(args.nonEmpty, "usage: Demo <inputPath>")

    val sparkConf = new SparkConf().setAppName("word count in spark").setMaster("local[*]")
    val sparkContext = new SparkContext(sparkConf)

    val lines = sparkContext.textFile(args(0), minPartitions = 2)

    // (uid, rawDateString)
    val uidAndDate = lines.map { line =>
      val fields = line.split(",")
      (fields(0), fields(1))
    }

    val grouped = uidAndDate.groupByKey()

    // For each user: dedupe the dates, sort them chronologically (NOT as
    // strings -- "yyyy/MM/d" allows non-zero-padded days, which misorder
    // lexically), and pair every raw date with its streak anchor (date - rank).
    // BUG FIX: the original captured a driver-side `var index` in this closure
    // that was shared across all users and never reset, producing wrong
    // anchors for every user after the first (and undefined mutation semantics
    // on a cluster); zipWithIndex restarts at 0 for each key. The formatter is
    // built inside the closure because DateTimeFormatter is not serializable,
    // and once per user rather than once per date.
    val uidDateDiff = grouped.flatMapValues { dates =>
      val formatter = DateTimeFormatter.ofPattern("yyyy/MM/d", Locale.CHINA)
      val sorted = dates.toSet.toList
        .map(raw => (raw, LocalDate.parse(raw, formatter)))
        .sortBy(_._2.toEpochDay)
      sorted.zipWithIndex.map { case ((raw, parsed), rank) =>
        (raw, parsed.minusDays(rank).toString)
      }
    }

    // Group by (uid, anchor): each group is one consecutive run of days.
    // BUG FIX: groupByKey gives no ordering guarantee, so the run's dates are
    // re-sorted chronologically before taking first/last day (the original
    // relied on head/last of an unordered iterable).
    val result = uidDateDiff
      .map { case (uid, (date, anchor)) => ((uid, anchor), date) }
      .groupByKey()
      .mapValues { run =>
        val formatter = DateTimeFormatter.ofPattern("yyyy/MM/d", Locale.CHINA)
        val days = run.toList.sortBy(d => LocalDate.parse(d, formatter).toEpochDay)
        (days.size, days.head, days.last)
      }
      .filter { case (_, (len, _, _)) => len >= 3 } // keep streaks of 3+ days only
      .map { case ((uid, _), (len, first, last)) => (uid, len, first, last) }

    // Note: saveAsTextFile writes a DIRECTORY named "out.txt" and fails if it
    // already exists.
    result.saveAsTextFile("out.txt")

    sparkContext.stop()
  }

}
