package com.bdqn.spark.chapter05.value

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import java.text.SimpleDateFormat
import java.util.Date

object Spark06_RDD_Operator_Transform_Test2 {

  /**
   * Requirement: from the server log file apache.log, count the number of
   * requests in each hour of the day.
   *
   * Each log line is space-separated and field 3 holds a timestamp of the
   * form "17/05/2015:10:05:03" (dd/MM/yyyy:HH:mm:ss). Because the layout is
   * fixed-width, the hour is simply the two characters at offsets 11-12 —
   * no `SimpleDateFormat` parsing is required.
   */
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("operator-groupby")
    val sc = new SparkContext(sparkConf)

    val lineRDD: RDD[String] = sc.textFile("input/apache.log")

    // Map each line to (hour, 1). The original implementation constructed two
    // SimpleDateFormat objects and parsed a java.util.Date for EVERY record;
    // for a fixed-layout timestamp a substring is equivalent and far cheaper.
    val hourRDD: RDD[(String, Int)] = lineRDD.map { line =>
      val dateStr: String = line.split(" ")(3) // e.g. "17/05/2015:10:05:03"
      (dateStr.substring(11, 13), 1)           // "HH" slice of dd/MM/yyyy:HH:mm:ss
    }

    // groupBy ships every (hour, 1) pair across the shuffle:
    // ("10", List(("10",1), ("10",1), ...))
    // This demonstrates the groupBy operator; in production prefer
    // hourRDD.reduceByKey(_ + _), which aggregates map-side first.
    val groupRDD: RDD[(String, Iterable[(String, Int)])] =
      hourRDD.groupBy(_._1)

    // Collapse each group to its size, i.e. the hit count for that hour.
    val resultRDD: RDD[(String, Int)] = groupRDD.map {
      case (hour, pairs) => (hour, pairs.size)
    }

    resultRDD.collect().foreach(println)

    sc.stop()
  }
}
