package com.larry.spark.rdd.transform

import org.apache.spark.{SparkConf, SparkContext}

object RDD_Oper_Req {

  /** Computes the top-3 advertisements by click count for each province.
    *
    * Input lines look like: "1516609143867 6 7 64 16"
    * i.e. (timestamp, province, city, user, ad) separated by single spaces.
    *
    * The first program argument, when present, overrides the default
    * input path "data/agent.log" (backward-compatible generalization).
    *
    * Pipeline:
    *   line -> ((province, ad), 1)
    *        -> ((province, ad), totalClicks)       via reduceByKey
    *        -> (province, (ad, totalClicks))
    *        -> (province, top-3 (ad, totalClicks)) via groupByKey + sort
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName("req")
    val sc = new SparkContext(conf)
    try {
      // Allow the input path to be supplied on the command line,
      // falling back to the original hard-coded location.
      val inputPath = args.headOption.getOrElse("data/agent.log")
      val lines = sc.textFile(inputPath)

      // "1516609143867 6 7 64 16" -> ((province, ad), 1)
      val pairs = lines.map { line =>
        val fields = line.split(" ")
        ((fields(1), fields(4)), 1)
      }

      // ((province, ad), 1) -> ((province, ad), totalClicks)
      // reduceByKey aggregates on the map side before shuffling.
      val clickCounts = pairs.reduceByKey(_ + _)

      // ((province, ad), totalClicks) -> (province, (ad, totalClicks))
      val byProvince = clickCounts.map {
        case ((province, ad), total) => (province, (ad, total))
      }

      // Group the (ad, total) pairs per province, then keep the three
      // ads with the highest click counts. After reduceByKey the group
      // holds at most one entry per distinct ad, so materializing it
      // with toList and sorting in memory is acceptable here.
      val top3 = byProvince
        .groupByKey()
        .mapValues(_.toList.sortBy(_._2)(Ordering.Int.reverse).take(3))

      top3.collect().foreach(println)
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }

}
