package sparkcore.day6.lesson02

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by Administrator on 2018/5/2.
  */
/**
  * Driver program that computes summary statistics over an Apache access log
  * using Spark RDDs:
  *   1. min / max / average response content size
  *   2. count of each response code
  *   3. IP addresses whose access count exceeds a threshold
  *   4. the top-N most requested endpoints
  *
  * Usage: LogAnalyer [logFilePath]
  * If no path argument is given, [[DefaultLogPath]] is used.
  */
object LogAnalyer {

  /** Default log file location, used when no CLI argument is supplied. */
  private val DefaultLogPath = "D:\\1711班\\第十二天\\资料\\log.txt"

  /** Requirement 3: report IPs with strictly more than this many accesses.
    * NOTE(review): the original requirement text says "more than 100 times",
    * but the code has always used 2 — kept at 2; confirm the intended value. */
  private val IpAccessThreshold = 2

  /** Requirement 4: number of top endpoints to report ("top three" per spec). */
  private val TopEndpointCount = 3

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local").setAppName("LogAnalyer")
    // Dropped the original `conf.set("","")`: an empty key/value pair is
    // meaningless dead configuration.
    val sc = new SparkContext(conf)
    try {
      // Allow the input path to be overridden from the command line.
      val logPath = args.headOption.getOrElse(DefaultLogPath)

      // Parsed log lines, cached because four separate jobs read them below.
      val apacheRDD: RDD[ApacheLog] = sc.textFile(logPath)
        .map(line => ApacheLog.parseLog(line))
        .cache()

      /**
        * Requirement 1:
        * The average, min, and max content size of responses returned from the server.
        */
      val contentSizeRDD = apacheRDD.map(apachelog => apachelog.contentSize).cache()
      val min = contentSizeRDD.min()
      val max = contentSizeRDD.max()
      val mean = contentSizeRDD.mean()
      println(s"contentSize: max: $max min:$min  avg:$mean")

      /**
        * Requirement 2:
        * A count of response codes returned.
        */
      apacheRDD.map(apachelog => (apachelog.resposeCode, 1))
        .reduceByKey(_ + _)
        .collect() // bring results to the driver so println output is visible in cluster mode
        .foreach { tuple =>
          println("code:" + tuple._1 + "  count:" + tuple._2)
        }

      /**
        * Requirement 3:
        * All IP addresses that have accessed this server more than
        * [[IpAccessThreshold]] times.
        */
      apacheRDD.map(apachelog => (apachelog.ipAddress, 1))
        .reduceByKey(_ + _)
        .filter(tuple => tuple._2 > IpAccessThreshold)
        .collect() // driver-side printing, see requirement 2
        .foreach { tuple =>
          println("ip:" + tuple._1 + " count:" + tuple._2)
        }

      /**
        * Requirement 4:
        * The top endpoints requested by count (TopN).
        * Fixed: the original used take(2) although the requirement asks for
        * the top three — now driven by [[TopEndpointCount]].
        */
      val topN = apacheRDD.map(apachelog => (apachelog.endPoint, 1))
        .reduceByKey(_ + _)
        .sortBy(_._2, ascending = false)
        .take(TopEndpointCount)
      for (tuple <- topN) {
        println("endPoint:" + tuple._1 + " count:" + tuple._2)
      }
    } finally {
      // Always release the SparkContext, even if a job above fails.
      sc.stop()
    }
  }

}
