package cn.lagou.part02

import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

/**
 * CDN access-log analysis job (local-mode Spark).
 *
 * Input: one whitespace-separated access-log record per line in `data/cdn.txt`
 * (Apache/Nginx combined-log style: field 0 = client IP, field 3 = timestamp,
 * field 6 = request path, field 9 = response size in bytes — assumed from the
 * indices used below; confirm against the actual log format).
 *
 * NOTE(review): the object name has a typo ("Anaylse") but is kept as-is —
 * it is the public entry-point name and is also reflected in appName.
 */
object LogAnaylse {

  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .master("local[4]")
      .appName(this.getClass.getCanonicalName)
      .getOrCreate()

    val sc: SparkContext = spark.sparkContext
    sc.setLogLevel("warn")

    val cdnRDD: RDD[String] = sc.textFile("data/cdn.txt")

    // Uncomment the analysis to run:
    //    aloneIPs1(cdnRDD)
    //    println(cdnRDD.getNumPartitions)
    //    aloneIPs(cdnRDD)
    //    videoIPs(cdnRDD)
    //    hourFlow(cdnRDD)
    hourPoor(cdnRDD)
    //    test()

    sc.stop()
  }

  /**
   * Counts requests per client IP (field 0) and prints (ip, count) pairs,
   * most frequent first.
   *
   * NOTE: `foreach(println)` runs on the executors; output is only visible
   * on the driver console because this job runs in local mode.
   */
  def aloneIPs(rdd: RDD[String]): Unit = {
    val ips: RDD[(String, Int)] = rdd.map { line =>
      val fields = line.split("\\s+")
      fields(0)
    }.map(ip => (ip, 1))
      .reduceByKey(_ + _)
      .sortBy(_._2, ascending = false)

    ips.foreach(println)
  }

  /**
   * Same per-IP request count as [[aloneIPs]], but extracts the IP with a
   * dotted-quad regex instead of relying on field position; lines without a
   * recognizable IPv4 address are dropped by the flatMap.
   */
  def aloneIPs1(cdnRDD: RDD[String]) = {
    // Matches an IPv4 address (each octet 0-255).
    val IPPattern = "((?:(?:25[0-5]|2[0-4]\\d|((1\\d{2})|([1-9]?\\d)))\\.){3}(?:25[0-5]|2[0-4]\\d|((1\\d{2})|([1-9]?\\d))))".r

    val ipnums = cdnRDD
      .flatMap(line => IPPattern.findFirstIn(line)) // None => line dropped
      .map(ip => (ip, 1))
      .reduceByKey(_ + _)
      .sortBy(_._2, ascending = false)

    ipnums.foreach(println)
  }

  /**
   * For every requested .mp4 video, collects the distinct set of client IPs
   * that fetched it, ordered by audience size (largest first), and writes
   * (videoName, List(ips...)) to `data/cdn/videoIPs`.
   */
  def videoIPs(rdd: RDD[String]): Unit = {
    val videoToIp: RDD[(String, String)] = rdd
      .filter { line =>
        val fields = line.split("\\s+")
        fields(6).endsWith(".mp4")
      }
      .map { line =>
        val fields = line.split("\\s+")
        // Video name = last path segment of the request URL.
        val segments = fields(6).split("/")
        (segments(segments.length - 1), fields(0))
      }

    // aggregateByKey = zero value + within-partition combiner + cross-partition merger.
    val ipsPerVideo: RDD[(String, List[String])] = videoToIp
      .aggregateByKey(List[String]())(
        (lst, ip) => lst :+ ip,        // accumulate IPs within a partition
        (lst1, lst2) => lst1 ++ lst2   // merge partial lists across partitions
      )
      .mapValues(_.distinct)           // one entry per unique viewer IP
      .sortBy(_._2.size, ascending = false)

    ipsPerVideo.saveAsTextFile("data/cdn/videoIPs")
  }

  /**
   * Sums response bytes (field 9) per hour of day and prints (hour, bytes)
   * pairs in descending traffic order.
   *
   * The timestamp field looks like `[dd/Mon/yyyy:HH:mm:ss`; drop(1) strips
   * the leading '[' and dropRight(6) strips `:mm:ss`, leaving `dd/Mon/yyyy:HH`.
   * NOTE(review): assumes field 9 is always numeric — a "-" size would throw
   * NumberFormatException; verify against the log format.
   */
  def hourFlow(rdd: RDD[String]): Unit = {
    val bytesPerHour = rdd.map { line =>
      val fields = line.split("\\s+")
      (fields(3).drop(1).dropRight(6), fields(9).toLong)
    }.reduceByKey(_ + _).sortBy(_._2, ascending = false)

    bytesPerHour.collect().foreach(println)
  }

  /** Ad-hoc sanity check for the "last path segment" extraction used in videoIPs. */
  def test(): Unit = {
    val strings = "https://v-cdn.abc.com.cn/114244.mp4".split("/")
    println(strings(strings.length - 1))
  }

  /**
   * Per-hour CDN traffic in GiB for successful responses (HTTP 200/206/304),
   * written to `data/cdn/hourPoor` as "HH时 CDN流量=NG" lines sorted by hour.
   * (Two-digit hour strings sort correctly lexicographically.)
   */
  def hourPoor(cdnRDD: RDD[String]) = {
    // Captures status code and response size for successful responses.
    val httpSizePattern = ".*\\s(200|206|304)\\s([0-9]+)\\s.*".r
    // Captures year and hour-of-day from the timestamp.
    val timePattern = ".*(2017):([0-9]{2}):[0-9]{2}:[0-9]{2}.*".r
    import scala.util.matching.Regex

    // True when the whole line matches the pattern, any group contents.
    def isMatch(pattern: Regex, str: String): Boolean = {
      str match {
        case pattern(_*) => true
        case _ => false
      }
    }

    // Extracts (hour, bytes) from a line; returns ("", 0L) on failure.
    // Lines are pre-filtered with isMatch, so the fallback is defensive only.
    def getTimeAndSize(line: String): (String, Long) = {
      var res = ("", 0L)
      try {
        val httpSizePattern(_, size) = line
        val timePattern(_, hour) = line
        res = (hour, size.toLong)
      } catch {
        // NonFatal so OOM/interrupts still propagate instead of being logged away.
        case scala.util.control.NonFatal(e) => e.printStackTrace()
      }
      res
    }

    cdnRDD
      .filter(line => isMatch(httpSizePattern, line))
      .filter(line => isMatch(timePattern, line))
      .map(getTimeAndSize)
      .groupByKey()
      .map { case (hour, sizes) => (hour, sizes.sum) }
      .sortByKey()
      // BUG FIX: was `102424 * 1024` — a typo that inflated the result;
      // bytes -> GiB is 1024^3.
      .map { case (hour, bytes) => hour + "时 CDN流量=" + bytes / (1024L * 1024 * 1024) + "G" }
      .saveAsTextFile("data/cdn/hourPoor")
  }
}
