package com.atbeijing.bigdata.spark.core.rdd.operator.transform

import org.apache.spark.{SparkConf, SparkContext}

object Spark06_Oper_Transform_2 {

    def main(args: Array[String]): Unit = {

        val conf = new SparkConf().setMaster("local[*]").setAppName("TransformOperator")
        // NOTE(review): hard-coded Windows scratch directory — works only on this
        // machine; consider making it configurable or dropping it for the default.
        conf.set("spark.local.dir", "e:/test")
        val sc = new SparkContext(conf)

        // TODO operator - transform - groupBy - (1 / 10)
        // From the server log file apache.log, compute the number of requests per hour.
        // Each log line's 4th whitespace-separated field is the timestamp, e.g.
        // "17/05/2015:10:05:03"; characters 11-12 are the hour of day.
        // line => (hour, 1) => (hour, totalCount)
        val rdd = sc.textFile("data/apache.log")
        val timeRDD = rdd.map(
            line => {
                val datas = line.split(" ")
                val time = datas(3)
                // substring(11, 13) extracts the two-digit hour from the timestamp field
                val hour = time.substring(11, 13)
                (hour, 1)
            }
        )

        // Count per hour. reduceByKey combines on the map side before shuffling,
        // unlike groupBy(_._1).mapValues(_.size) which ships every (hour, 1) record's
        // grouped iterable across the network just to measure its length.
        // The resulting (hour, count) pairs are identical.
        timeRDD.reduceByKey(_ + _).collect().foreach(println)

        sc.stop()

    }
}
