package com.study.spark.core.scala

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Counts Apache access-log requests per hour and prints the result.
 *
 * Reads `datas/apache.log`, extracts the hour field from each line's
 * timestamp, aggregates a count per hour, and prints `(hour, count)`
 * pairs sorted by hour.
 *
 * @Author: LiuQun
 * @Date: 2022/2/17 9:46
 */
object TimeStatisticObj {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setMaster("local").setAppName("TimeStatistic")
    val sc: SparkContext = new SparkContext(conf)

    val lines: RDD[String] = sc.textFile("datas/apache.log")

    // Map each log line to (hour, 1). The 4th whitespace-separated token is
    // assumed to be the timestamp (e.g. "17/05/2015:10:05:03"), so splitting
    // it on ':' makes index 1 the hour — NOTE(review): verify this matches
    // the actual log format; malformed lines would throw here.
    val hourPairs: RDD[(String, Int)] = lines.map { line =>
      val fields: Array[String] = line.split(" ")
      (fields(3).split(":")(1), 1)
    }

    // reduceByKey combines partial counts on the map side before the shuffle,
    // so only one (hour, partialCount) pair per partition crosses the network.
    // The original groupBy(_._1) + map(_.size) shuffled every single
    // (hour, 1) record and materialized a full Iterable per key — same
    // result, far more data movement and memory pressure.
    hourPairs
      .reduceByKey(_ + _)
      .collect()
      .sorted
      .foreach(println)

    sc.stop()
  }
}
