package cn.lagou.text

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession
import scala.util.matching.Regex

object CDNTest {

  /**
   * Returns true when the whole line matches `pattern`.
   *
   * `case pattern(_*)` succeeds only if the regex matches the entire string,
   * regardless of how many capture groups the pattern defines.
   */
  def isMatch(pattern: Regex, str: String): Boolean = {
    str match {
      case pattern(_*) => true
      case _           => false
    }
  }

  /**
   * Extracts (hour, responseBytes) from one CDN access-log line.
   *
   * @param httpSizePattern regex whose 2nd capture group is the response size in bytes
   * @param timePattern     regex whose 2nd capture group is the hour of day
   * @param line            raw log line
   * @return (hour, size) on success; ("", 0L) when either pattern does not match
   */
  def getTimeAndSize(httpSizePattern: Regex, timePattern: Regex, line: String): (String, Long) = {
    // Use Option-returning matchers instead of refutable pattern vals + try/catch:
    // a non-matching line is an expected case, not an exception to stack-trace.
    (timePattern.findFirstMatchIn(line), httpSizePattern.findFirstMatchIn(line)) match {
      case (Some(t), Some(s)) => (t.group(2), s.group(2).toLong)
      case _                  => ("", 0L)
    }
  }

  def main(args: Array[String]): Unit = {

    // Sample line:
    // 100.79.121.48 HIT 33 [15/Feb/2017:00:00:46 +0800] "GET http://cdn.v.abc.com.cn/videojs/video.js HTTP/1.1" 200 174055 "http://www.abc.com.cn/" "Mozilla/4.0+(compatible;+MSIE+6.0;+Windows+NT+5.1;+Trident/4.0;)"

    // IPv4 address
    val IPPattern = "((?:(?:25[0-5]|2[0-4]\\d|((1\\d{2})|([1-9]?\\d)))\\.){3}(?:25[0-5]|2[0-4]\\d|((1\\d{2})|([1-9]?\\d))))".r
    // HTTP status (200/206/304) and response size in bytes
    val httpSizePattern: Regex = ".*\\s(200|206|304)\\s([0-9]+)\\s.*".r
    // [15/Feb/2017:11:17:13 +0800] -> capture year and hour for per-hour stats
    val timePattern: Regex = ".*(2017):([0-9]{2}):[0-9]{2}:[0-9]{2}.*".r
    // BUGFIX: the dot must be escaped, otherwise e.g. "123Xmp4" would also match
    // (the .mp4 filter below already escapes it — keep the two consistent).
    val videoPattern = "([0-9]+)\\.mp4".r

    val spark = SparkSession
      .builder()
      .appName(this.getClass.getCanonicalName)
      .master("local[*]")
      .getOrCreate()
    val sc = spark.sparkContext
    sc.setLogLevel("warn")

    // BUGFIX: forward slashes work on every OS; "data\\cdn.txt" breaks on Linux/macOS.
    val cdnRDD = sc.textFile("data/cdn.txt")

    // 1) Hit count per distinct IP, descending.
    val ipnums: RDD[(String, Int)] = cdnRDD
      .flatMap(line => IPPattern.findFirstIn(line))
      .map(ip => (ip, 1))
      .reduceByKey(_ + _)
      .sortBy(_._2, ascending = false)
    ipnums.saveAsTextFile("data/cdn/aloneIPs")

    // 2) Distinct viewer IPs per video, descending by audience size.
    // BUGFIX: the original called .toString on the Option returned by findFirstIn,
    // producing keys/values like "Some(123.mp4)"; unwrap the Option instead.
    val res: RDD[(String, List[String])] = cdnRDD
      .filter(line => line.matches(".*([0-9]+)\\.mp4.*"))
      .map { line =>
        (videoPattern.findFirstIn(line).getOrElse(""), IPPattern.findFirstIn(line).getOrElse(""))
      }
      .aggregateByKey(List[String]())(
        (lst, ip) => lst :+ ip,
        (lst1, lst2) => lst1 ++ lst2
      )
      .mapValues(_.distinct)
      .sortBy(_._2.size, ascending = false)
    res.saveAsTextFile("data/cdn/videoIPs")

    // 3) CDN traffic (GB) per hour of day.
    val hpr: RDD[String] = cdnRDD
      .filter(line => isMatch(httpSizePattern, line))
      .filter(line => isMatch(timePattern, line))
      .map(line => getTimeAndSize(httpSizePattern, timePattern, line))
      // reduceByKey sums on the map side; groupByKey + sum shuffles every value.
      .reduceByKey(_ + _)
      .sortByKey()
      // BUGFIX: bytes -> GB is 1024 * 1024 * 1024, not 102424 * 1024.
      .map(x => x._1 + "时 CDN流量=" + x._2 / (1024 * 1024 * 1024) + "G")
    hpr.saveAsTextFile("data/cdn/hourPoor")

    spark.close()
  }

}
