package com.jhhe.homework4_2

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import java.time.LocalDateTime
import java.time.format.DateTimeFormatter
import java.util.Locale

object CdnLogAnalyse {

  // DateTimeFormatter is immutable and thread-safe, so build both formatters
  // once instead of on every record. The closure below only references this
  // singleton's method, so nothing non-serializable is captured.
  private val InputFormat: DateTimeFormatter =
    DateTimeFormatter.ofPattern("dd/MMM/yyyy:HH:mm:ss", Locale.ENGLISH)
  private val OutputFormat: DateTimeFormatter =
    DateTimeFormatter.ofPattern("yyyyMMddHH")

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("CdnLogAnalyse").setMaster("local[*]")
    val sc = new SparkContext(conf)
    sc.setLogLevel("warn")

    val cdnLog = sc.textFile("data/cdn.txt")
    // Parse each log line into (ip, time, url). Lines with fewer than 7
    // whitespace-separated fields are dropped up front; the original indexed
    // fields(6) unguarded and would throw ArrayIndexOutOfBoundsException on
    // malformed lines.
    val rdd: RDD[(String, String, String)] = cdnLog
      .map(_.split("\\s+"))
      .filter(_.length > 6)
      .map(fields => (fields(0), fields(3), fields(6)))
    rdd.cache()

    // 独立IP数 (distinct IP count)
    val ipCnt = rdd.map(_._1).distinct().count()
    println(s"独立IP数:$ipCnt")

    // 每个视频独立IP数 (distinct IPs per .mp4 video)
    rdd.filter(_._3.endsWith(".mp4"))
      .map(x => (x._3, x._1))
      .distinct()
      .countByKey()
      .foreach(x => println(x._1 + " " + x._2))

    // 统计一天中每个小时流量 (requests per hour bucket).
    // parseDate returns null for unparseable/out-of-range timestamps; drop
    // those instead of counting them as a single bogus null bucket.
    rdd.map(x => (parseDate(x._2.substring(1)), 1))
      .filter(_._1 != null)
      .countByKey()
      .foreach(x => println(x._1 + " " + x._2))

    sc.stop()
  }

  /**
   * Parse a CLF-style timestamp (dd/MMM/yyyy:HH:mm:ss, English month names)
   * into an hour-bucket key formatted as yyyyMMddHH.
   *
   * Dates before year 2000 or after the current year are treated as invalid.
   * NOTE: the original compared the 10-char yyyyMMddHH string to the 4-char
   * year string lexicographically, which made every current-year date compare
   * greater (equal prefix, longer string) and be wrongly rejected. The year
   * is now compared numerically.
   *
   * @param dateStr timestamp text, e.g. "01/Jul/2014:01:00:00"
   * @return yyyyMMddHH string, or null if unparseable or out of range
   */
  def parseDate(dateStr: String): String = {
    try {
      val dateTime = LocalDateTime.parse(dateStr, InputFormat)
      val year = dateTime.getYear
      if (year < 2000 || year > LocalDateTime.now().getYear) null
      else dateTime.format(OutputFormat)
    } catch {
      case _: Exception =>
        println(s"日期解析失败：$dateStr")
        null
    }
  }
}
