package com.zhl.spark.core.rddtransform

import com.zhl.spark.core.BasicRdd

/**
 * @program: demos
 * @description: Demonstrates the RDD groupBy transformation by counting
 *               Tomcat access-log hits per "HH:MM" time bucket.
 * @author: 刘振华
 * @create: 2020-12-01 10:56
 **/
object groupByRdd extends BasicRdd {
    def main(args: Array[String]): Unit = {
        test()
    }

    /**
     * Counts Tomcat access-log hits per "HH:MM" bucket using the `groupBy`
     * transformation, then prints each (bucket, count) pair.
     *
     * Sample input line:
     *   0:0:0:0:0:0:0:1 - - [02/Dec/2020:18:39:20 +0800] "GET /login_tlm.jsp HTTP/1.1" 404 -
     *
     * Splitting on "2020:" leaves "18:39:20 +0800] ..." as the tail; its first
     * two ":"-separated fields form the "HH:MM" grouping key.
     *
     * NOTE: the "2020:" delimiter is hard-coded to this dataset's year — lines
     * from any other year are skipped by the filter below.
     */
    override def logic(): Unit = {
        sc.textFile("spark//datas//tomcat.log")
            // Guard against blank/malformed lines: without this filter,
            // split("2020:") yields a single-element array and the (1) access
            // below throws ArrayIndexOutOfBoundsException.
            .filter(_.contains("2020:"))
            .map { line =>
                val tail  = line.split("2020:")(1)   // "18:39:20 +0800] ..."
                val parts = tail.split(":")          // ["18", "39", "20 +0800] ..."]
                (parts(0) + ":" + parts(1), 1)       // key = "HH:MM"
            }
            .groupBy(_._1)                 // Iterable of (key, 1) pairs per key
            .map(t => (t._1, t._2.size))   // group size == hit count
            .foreach(println)              // runs on executors, not the driver
    }
}
