package cn.lagou.spark.log

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

import java.util.regex.{Matcher, Pattern}
import org.apache.spark.{SparkConf, SparkContext}

/**
 * 2、日志分析
 */
/**
 * CDN access-log analysis over a plain-text log file (data/cdn.txt):
 *   1. distinct client IPs (and the top-N most frequent);
 *   2. distinct-IP count per .mp4 video (top-N);
 *   3. total traffic (bytes of 200/206/304 responses) per hour of day.
 */
object LogAnalyse {
  // Matches lines that request an .mp4 resource.
  // group(1) = client IP (first whitespace-delimited token), group(2) = video file name.
  val videoPattern: Pattern = Pattern.compile("""(\S+) .+/(\S+\.mp4) .*""")

  // Matches successfully served responses (status 200/206/304).
  // group(1) = timestamp up to first space inside "[...]" (e.g. dd/MMM/yyyy:HH:mm:ss),
  // group(2) = status code, group(3) = response size in bytes.
  val flowPattern: Pattern = Pattern.compile(""".+ \[(.+?) .+ (200|206|304) (\d+) .+""")

  // How many top entries to print in each report.
  val topN = 5

  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession
      .builder()
      .appName(this.getClass.getCanonicalName)
      .master("local[*]")
      .getOrCreate()
    val sc: SparkContext = spark.sparkContext
    sc.setLogLevel("warn")

    val logRDD: RDD[String] = sc.textFile("data/cdn.txt")
    // Report 1: distinct client IPs.
    ipAnalyse(logRDD)
    // Report 2: distinct IPs per video (a video line is one containing "*.mp4").
    videoAnalyse(logRDD)
    // Report 3: traffic per hour of day.
    flowAnalyse(logRDD)

    sc.stop()
  }

  /**
   * Prints the top-N IPs by request count and the total number of distinct IPs.
   * The client IP is assumed to be the first whitespace-delimited field of each line.
   */
  def ipAnalyse(logRDD: RDD[String]): Unit = {
    val ipRDD: RDD[(String, Int)] = logRDD
      .map(line => (line.split("\\s+")(0), 1))
      .reduceByKey(_ + _)
      .sortBy(_._2, ascending = false)
      .cache() // two actions below (take + count) — avoid recomputing the lineage

    println(s"排名前${topN}的ip出现次数")
    ipRDD.take(topN).foreach(println)
    println(s"独立IP总数：${ipRDD.count()}")
    println("*" * 30)
  }

  /**
   * Prints the top-N videos by distinct-IP count and the total number of
   * distinct (video, ip) pairs.
   *
   * Fix: the original printed `videoIpRDD.count()` BEFORE filtering out the
   * (("",""), 0) sentinel emitted for non-matching lines, so the "total"
   * was simply the raw log line count. We now count distinct (video, ip)
   * pairs after deduplication.
   */
  def videoAnalyse(logRDD: RDD[String]): Unit = {
    // flatMap + Option drops non-video lines outright — no sentinel tuples needed.
    val videoIpPairs: RDD[(String, String)] = logRDD.flatMap { line =>
      val m: Matcher = videoPattern.matcher(line)
      if (m.matches()) Some((m.group(2), m.group(1))) else None
    }

    // One record per distinct (video, ip) pair.
    val distinctVideoIp: RDD[(String, String)] = videoIpPairs
      .map(pair => (pair, 1))
      .reduceByKey(_ + _)
      .keys
      .cache() // reused by take(topN) pipeline and count()

    println(s"\n排名前${topN}的视频独立IP数")
    distinctVideoIp
      .map { case (video, _) => (video, 1) }
      .reduceByKey(_ + _)
      .sortBy(_._2, ascending = false)
      .take(topN)
      .foreach(println)
    println(s"视频独立IP总数：${distinctVideoIp.count()}")
    println("*" * 30)
  }

  /**
   * Prints total traffic (in whole GB, integer-truncated as in the original)
   * per hour of day, for responses with status 200/206/304.
   * The hour is the second ":"-separated field of the timestamp
   * (assumed format dd/MMM/yyyy:HH:mm:ss, zero-padded — so the string sort
   * below orders hours correctly).
   */
  def flowAnalyse(logRDD: RDD[String]): Unit = {
    // flatMap + Option drops non-matching lines; no ("", 0L) sentinel needed.
    val flowRDD: RDD[(String, Long)] = logRDD.flatMap { line =>
      val m: Matcher = flowPattern.matcher(line)
      if (m.matches())
        Some((m.group(1).split(":")(1), m.group(3).toLong))
      else
        None
    }

    println("\n每小时流量:")
    flowRDD
      .reduceByKey(_ + _, 1) // single partition: at most 24 keys
      .collectAsMap()
      .mapValues(_ / 1024 / 1024 / 1024) // bytes -> GB (integer division, as before)
      .toList
      .sortBy(_._1)
      .foreach { case (hour, gb) => println(s"${hour}时 => 流量${gb} Gb") }
  }

}
