package core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * 知识点：
 *  sortBy
 */
/**
 * Demonstrated operator: sortBy.
 *
 * Computes the top-5 sites by PV (page views) and by UV (unique
 * visiting IPs) from a whitespace-delimited access log.
 */
object core02_operator_pvuv {

  /**
   * Entry point: prints the PV top-5 and UV top-5 for the log at
   * data/pvuvdata.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("test")

    val sc = new SparkContext(conf)
    sc.setLogLevel("ERROR")

    // Ensure the SparkContext is released even if the job throws.
    try {
      // Sample record (tab-separated):
      // 42.62.88.214	新疆	2018-11-12	1542011088714	734986595720971991	www.baidu.com	Click
      val dataRdd: RDD[String] = sc.textFile("data/pvuvdata")

      // Parse each line exactly once into (site, ip); cached because the
      // same parsed RDD feeds both the PV and the UV job below.
      val siteIpRdd: RDD[(String, String)] = dataRdd.map { line =>
        val fields = line.split("\\s++")
        (fields(5), fields(0)) // field 5 = site, field 0 = client IP
      }.cache()

      // PV: every record counts as one view.
      println("PV")
      topN(siteIpRdd.map { case (site, _) => (site, 1) }, 5)
        .foreach(println)

      // UV: how many distinct IPs visited each site — deduplicate the
      // (site, ip) pairs first so each IP counts once per site.
      println("UV")
      topN(siteIpRdd.distinct().map { case (site, _) => (site, 1) }, 5)
        .foreach(println)
    } finally {
      sc.stop()
    }
  }

  /**
   * Sums the (key, 1) pairs per key and returns the n keys with the
   * highest totals, in descending count order.
   *
   * @param pairs keyed unit counts to aggregate
   * @param n     how many top entries to return
   * @return up to n (key, count) pairs, highest count first
   */
  private def topN(pairs: RDD[(String, Int)], n: Int): Array[(String, Int)] =
    pairs.reduceByKey(_ + _)
      .sortBy(_._2, ascending = false)
      .take(n)

}
