package com.dxf.bigdata.D05_spark_again.action

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD

/**
 * Demo driver: reads an Apache access log and counts hits per hour,
 * printing the RDD lineage (`toDebugString`) before triggering the job.
 *
 * Input: ./datas/apache.log — space-separated fields where field index 3
 * is a timestamp whose characters [11, 13) are the two-digit hour.
 * Output: `(hour, count)` pairs printed to stdout, sorted by hour.
 */
object ToDebugString {
  def main(args: Array[String]): Unit = {

    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("app")
    sparkConf.set("spark.port.maxRetries", "100")
    // apache.log: count clicks per hour of day
    val context = new SparkContext(sparkConf)

    try {
      val dataRDD: RDD[String] = context.textFile("./datas/apache.log", 2)

      // Extract the timestamp field (4th space-separated token) from each line.
      val timeRdd: RDD[String] = dataRDD.map(
        line => {
          val words: Array[String] = line.split(" ")
          words(3)
        }
      )

      // reduceByKey performs map-side combining, so only one record per hour
      // per partition is shuffled — unlike groupBy(_._1) + iter.size, which
      // shuffles every single (hour, 1) pair before counting.
      val value: RDD[(String, Int)] = timeRdd.map(x => {
        val hour: String = x.substring(11, 13)
        (hour, 1)
      }).reduceByKey(_ + _)

      // Print the lineage before the action so the full dependency chain shows.
      println(value.toDebugString)

      // Collect is safe here: at most 24 hour buckets. Sort numerically so
      // "9" precedes "10" (lexicographic order would be wrong).
      value.collect().sortBy(x => Integer.parseInt(x._1)).foreach(println)
    } finally {
      // Always release the SparkContext, even if the job fails.
      context.stop()
    }
  }

}
