package com.haozhen.homework

import org.apache.spark.SparkConf
/**
  * @author haozhen
  * @email haozh@ync1.com
  * @date 2021/1/3  15:04
  */
object Work2 {

  /**
    * Entry point: reads a CDN access log from `data/cdn.txt` and prints
    * three reports — distinct IP count, distinct IPs per .mp4 video, and
    * request counts bucketed by hour.
    */
  def main(args: Array[String]): Unit = {
    import org.apache.spark.SparkContext
    val conf = new SparkConf().setAppName("work2")
      .setMaster("local[*]")
    val context = new SparkContext(conf)

    context.setLogLevel("WARN")

    // val, not var: the RDD reference is never reassigned.
    val lines = context.textFile("data/cdn.txt")

    // Split on runs of whitespace; assumes a common access-log layout with
    // the client IP at column 0, the "[dd/MMM/yyyy:HH:mm:ss ..." timestamp
    // at column 3, and the requested URL at column 6 — TODO confirm against
    // an actual line of data/cdn.txt.
    val ipUrlRDD = lines.map(line => {
      val fields = line.split("\\s+")
      (fields(0), fields(3), fields(6))
    })

    // 独立IP数 (number of distinct client IPs)
    val independentIp = ipUrlRDD.map(_._1).distinct().count()
    println(s"独立IP数量是：$independentIp")

    // 每个视频的独立Ip数量 (distinct IP count per .mp4 video):
    // keep only .mp4 requests, de-duplicate (url, ip) pairs, count per url.
    val independentIpWithMP4 = ipUrlRDD.filter(v => {
      v._3.endsWith(".mp4")
    }).map(v => (v._3, v._1)).distinct().countByKey()

    independentIpWithMP4.foreach(x => println(x._1 + "\t" + x._2))

    // 统计一天每个小时流量 (requests per hour): strip the leading '[' from
    // the timestamp field, bucket to "yyyyMMddHH", count per bucket.
    // NOTE: lines whose timestamp fails to parse all land in a single
    // null-keyed bucket (parseDate returns null on failure).
    val stringToLong = ipUrlRDD.map(x => (parseDate(x._2.substring(1)), 1)).countByKey()
    stringToLong.foreach(x => println(x._1 + "\t" + x._2))
    //
//    ipUrlRDD.map(v=>(v._3,1)).reduceByKey(_+_).foreach(println)
//    println("--------------------")
//    ipUrlRDD.map(v=>((v._3,v._1),1)).reduceByKey(_+_).foreach(println)


    context.stop()
  }

  /**
    * Parses an access-log timestamp of the form "dd/MMM/yyyy:HH:mm:ss"
    * (English month abbreviation) and returns the hour bucket "yyyyMMddHH".
    *
    * Returns null when the string cannot be parsed, or when the parsed
    * year falls outside [2000, current year] (treated as corrupt data).
    *
    * @param dateStr timestamp without the leading '[' of the log field
    * @return "yyyyMMddHH" bucket, or null for unparseable/out-of-range input
    */
  def parseDate(dateStr: String): String = {
    import java.time.LocalDateTime
    import java.time.format.DateTimeFormatter
    import java.util.Locale
    import scala.util.control.NonFatal
    try {
      val formatter: DateTimeFormatter = DateTimeFormatter.ofPattern("dd/MMM/yyyy:HH:mm:ss", Locale.ENGLISH)

      val time = LocalDateTime.parse(dateStr, formatter)

      // BUG FIX: the old code compared the 10-char "yyyyMMddHH" string
      // lexicographically against the 4-char current-year string
      // ("2021010315" > "2021" is true), so every timestamp from the
      // current year onward was wrongly rejected as null. Compare the
      // parsed year numerically instead.
      val year = time.getYear
      if (year < 2000 || year > LocalDateTime.now().getYear) null
      else time.format(DateTimeFormatter.ofPattern("yyyyMMddHH"))
    } catch {
      // NonFatal: let OutOfMemoryError, InterruptedException, etc. propagate.
      case NonFatal(_) =>
        print(s"日期解析错误： $dateStr")
        null
    }
  }
}
