package com.dxf.bigdata.D05_spark_again

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object GroupBy {

  /**
   * Reads `./datas/apache.log`, extracts the hour from each request's
   * timestamp, counts hits per hour, and prints the counts sorted by hour.
   *
   * Log lines are space-separated; column 4 (index 3) is the timestamp,
   * whose characters at positions 11-13 hold the two-digit hour
   * (e.g. "17/05/2015:10:05:03" -> "10").
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {

    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("app")

    // Earlier exercise kept for reference: groupBy with a named grouping function.
    //    val sc = new SparkContext(sparkConf)
    //
    //    val scRDD: RDD[Int] = sc.makeRDD(List(1, 2, 3, 4), 2)
    //
    //    def groupFunction(num: Int) = {
    //      num % 2
    //    }
    //
    //    scRDD.groupBy(groupFunction).foreach(println)

    // apache.log: count clicks that happened in the same hour of the day.

    val context = new SparkContext(sparkConf)

    // Bug fix: the original never stopped the SparkContext, leaking the local
    // Spark runtime (worker threads, UI port) — stop it even if the job fails.
    try {
      val dataRDD: RDD[String] = context.textFile("./datas/apache.log", 2)

      // Keep only the timestamp column of each log line.
      val timeRdd: RDD[String] = dataRDD.map(
        line => {
          val words: Array[String] = line.split(" ")
          words(3)
        }
      )

      // Count records per hour. reduceByKey aggregates map-side before the
      // shuffle, unlike the original groupBy(_._1) + iter.size, which moved
      // every individual (hour, 1) record across the network; the resulting
      // (hour, count) pairs are identical.
      val value: RDD[(String, Int)] = timeRdd
        .map(x => (x.substring(11, 13), 1))
        .reduceByKey(_ + _)

      // The result is tiny (at most 24 hours), so collect to the driver and
      // sort numerically there before printing.
      value.collect().sortBy(x => {
        val hour: String = x._1
        Integer.parseInt(hour)
      }).foreach(println)
    } finally {
      context.stop()
    }
  }

}
