package com.atguigu.bigdata.test

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import java.text.SimpleDateFormat

/**
 * @Author: yqb
 * @Date: 2022/6/8 23:26
 * @Description: Demo — count Apache access-log page views per hour with Spark
 * @Version: 1.0
 * */
object Need06 {
    def main(args: Array[String]): Unit = {
        // Local Spark context using all available cores.
        val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("Need06")
        val context = new SparkContext(conf)

        // Requirement: count page views per hour from an Apache access log.
        // Each line looks like:
        //   83.149.9.216 - - 17/05/2015:10:05:03 GET /path ...
        // Field 3 (space-separated) is the timestamp; the token after the
        // first ':' is the hour of day.
        val lines: RDD[String] = context.textFile("datas/apache.log")
        val hourOnes: RDD[(String, Int)] = lines.map(line => {
            val time = line.split(" ")(3)
            val hour: String = time.split(":")(1)
            (hour, 1)
        })

        // FIX: RDD.foreach runs inside the executors, so in cluster mode the
        // println output never reaches the driver's stdout (and even in
        // local mode the print order is nondeterministic). The aggregated
        // result has at most 24 entries (one per hour), so it is safe to
        // collect it to the driver before printing.
        hourOnes.reduceByKey(_ + _).collect().foreach(println)

        context.stop()
    }

}

/**
 * Notes:
 * - groupBy partitions records by a user-specified key and therefore
 *   triggers a shuffle.
 * - Alternative implementation for this requirement: groupBy + mapValues.
 */
