package com.atbeijing.bigdata.spark.mytest.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}


object Operator_filter {

  /**
   * Spark Core demo: count requests per hour from an Apache access log,
   * then demonstrate `sortBy` on both string and integer keys.
   *
   * Reads `data/apache.log`; prints (hour, count) pairs unsorted, then
   * sorted numerically by hour, then a small hand-built RDD sorted by key.
   */
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setMaster("local[6]").setAppName("Operator_filter")
    val sc = new SparkContext(conf)

    try {
      // Count hits per hour from the server log data/apache.log.
      val rdd: RDD[String] = sc.textFile("data/apache.log")

      // NOTE(review): assumes the timestamp looks like "17/05/2015:10:05:03",
      // so the first ':' in the line separates the date from the hour and the
      // two characters right after it are the hour field — confirm against the data.
      val result: RDD[(String, Int)] = rdd.map { line =>
        val sep: Int = line.indexOf(":")
        val hour: String = line.substring(sep + 1, sep + 3)
        (hour, 1)
      }.reduceByKey(_ + _)
      result.collect().foreach(println)

      println("=======================================")
      // sortBy treats each tuple as a whole; default order is ascending.
      // Convert the hour string to Int so "6" sorts before "10" (numeric, not lexicographic).
      val r1: RDD[(String, Int)] = result.sortBy(a => a._1.toInt)
      r1.collect().foreach(println)

      println("=======================================")
      val r2: RDD[(Int, Int)] = sc.makeRDD(
        List((6, 366), (20, 486), (19, 493), (22, 346))
      )
      // Key is already Int — the original's `.toInt` here was a redundant no-op conversion.
      val r3: RDD[(Int, Int)] = r2.sortBy(_._1)
      r3.collect().foreach(println)
    } finally {
      // Always release the SparkContext, even if a job above fails.
      sc.stop()
    }
  }
}
