package com.ww.spark.core

import org.apache.spark.{SparkConf, SparkContext}

object RDDTry02_api_sort {

  /**
   * Reads a tab-separated click log ("data/pvuvdata") and prints two top-5 rankings:
   *   1. PV — clicks per site, users NOT de-duplicated;
   *   2. UV — clicks per site, de-duplicated by (site, ip) so each user counts once.
   *
   * Record layout (tab-separated); field 0 is the client IP, field 5 the site domain:
   *   199.111.148.214	重庆	2018-11-12	1542011088714	6755235587059844279	www.suning.com	Regist
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local").setAppName("try02")
    val sc = new SparkContext(conf)

    // Ensure the SparkContext is released even if the job throws
    // (the original never called sc.stop()).
    try {
      val file = sc.textFile("data/pvuvdata", 5)

      println("\n==========================统计网站的点击量,用户不去重==================================")
      // (site, 1) for every click, then sum per site -> (site, pv)
      val siteOnes = file.map(line => (line.split("\t")(5), 1))
      val pvCounts = siteOnes.reduceByKey(_ + _)
      // Swap to (pv, site) so sortByKey can order by count, descending.
      val byCount = pvCounts.map(_.swap).sortByKey(ascending = false)
      // take(5) pulls the top entries to the driver; swap back to (site, pv) for display.
      val pvTop5 = byCount.take(5).map(_.swap)
      pvTop5.foreach(println)

      println("\n==========================统计网站的点击量,用户去重==================================")
      // (site, ip) per click; distinct() keeps each user at most once per site.
      val siteUser = file.map { line =>
        val fields = line.split("\t")
        (fields(5), fields(0))
      }
      // Pattern-match instead of a lambda parameter shadowing an outer val.
      val uvCounts = siteUser.distinct()
        .map { case (site, _) => (site, 1) }
        .reduceByKey(_ + _)
      val uvTop5 = uvCounts.sortBy(_._2, ascending = false).take(5)
      uvTop5.foreach(println)
    } finally {
      sc.stop()
    }
  }
}
