package day04.demo

import org.apache.spark.{SparkConf, SparkContext}

import java.text.SimpleDateFormat
import java.util.Date
import scala.io.Source

/**
 * @author wsl
 * @version 2022-04-19-18:42
 *
 */
object WC {

  /**
   * Entry point: builds a local SparkContext, runs the three word-count
   * demos, and always releases the context afterwards.
   */
  def main(args: Array[String]): Unit = {

    val conf: SparkConf = new SparkConf().setAppName("rdd").setMaster("local[*]")
    val sc: SparkContext = new SparkContext(conf)

    try {
      wc()
      wc2(sc)
      wc3(sc)
    } finally {
      // Stop the context even if one of the demos throws,
      // so the local Spark runtime is not leaked.
      sc.stop()
    }
  }

  /**
   * Plain-Scala (non-Spark) word count over sparkcore/input/1.txt.
   * Splits each line on single spaces and prints the (word, count)
   * pairs sorted by ascending count.
   */
  def wc(): Unit = {

    // Source.fromFile holds an open file handle; close it even on failure.
    val source = Source.fromFile("sparkcore/input/1.txt")
    try {
      val result: List[(String, Int)] = source
        .getLines().toList
        .flatMap(_.split(" "))
        .map((_, 1))
        .groupBy(_._1)
        .map(kv => (kv._1, kv._2.size))
        .toList
        .sortBy(kv => kv._2)

      println(result)
    } finally {
      source.close()
    }
  }

  /**
   * Spark word count over a small in-memory list.
   * NOTE(review): the elements contain no commas, so split(",") is a
   * no-op and each element is counted as a whole word — kept as-is to
   * preserve behavior; presumably split(" ") or multi-word input was
   * intended.
   */
  def wc2(sc: SparkContext): Unit = {

    sc.makeRDD(List("hello", "world", "hi", "hi", "ssss"))
      .flatMap(_.split(","))
      .groupBy(word => word)
      // .map(t => (t._1, t._2.toList.size))
      .map { case (k, v) => (k, v.toList.size) } // partial function
      .collect().foreach(println)

  }

  /**
   * Counts user visits per hour from user_visit_action.txt.
   * Each line is space-separated; field 1 is underscore-separated with
   * the time-of-day ("HH:mm:ss") as its first component. Emits
   * (hour, 1) per record and sums with reduceByKey.
   *
   * SimpleDateFormat is created per record on purpose: it is not
   * thread-safe, so a shared instance must not be captured by the
   * Spark closure.
   */
  def wc3(sc: SparkContext): Unit = {

    sc.textFile("sparkcore/input/user_visit_action.txt")
      .map(
        line => {
          val words: Array[String] = line.split(" ")
          val str: Array[String] = words(1).split("_")
          val time: Date = new SimpleDateFormat("HH:mm:ss").parse(str(0))
          val hour: String = new SimpleDateFormat("HH").format(time)
          (hour, 1)
        }
      )
      //      .groupBy(_._1)
      //      .map {
      //        case (hour, iters) => {
      //          (hour, iters.size)
      //        }
      //      }
      .reduceByKey(_ + _)
      .collect().foreach(println)

  }
}
