package io.wen.bd.s6m2.spark

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions._

// One parsed CDN access-log record. All fields are kept as raw strings at parse
// time; downstream analysis casts/derives typed columns (e.g. respSize -> int).
// Source columns (space-split log line): ip=arr(0), hit=arr(1), respTime=arr(2),
// reqTime=arr(3), url=arr(6), respSize=arr(9).
case class CdnData(ip: String, hit: String, respTime: String, reqTime: String, url: String, respSize: String)

object LogAnalysis {

  /**
   * Analyzes a CDN access log with Spark:
   *   2.1 count of unique client IPs,
   *   2.2 count of unique IPs per video (.mp4) URL,
   *   2.3 total traffic (response bytes) per day+hour bucket.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName("spark session")
      .master("local[*]")
      .getOrCreate()

    spark.sparkContext.setLogLevel("WARN")

    val cdnPath = "file:///D:\\workspace\\project\\bd-hw\\stage6-module2-spark-core-sql\\data\\cdn.txt"

    import spark.implicits._
    val cdnDS = spark
      .sparkContext
      .textFile(cdnPath)
      .map(_.split(" "))
      // Guard against malformed/short lines: we index up to arr(9) below,
      // so any line with fewer than 10 fields would throw.
      .filter(_.length > 9)
      .map(arr => CdnData(arr(0), arr(1), arr(2), arr(3), arr(6), arr(9)))
      .toDS

    // Derive the columns used by the analyses below.
    val extendCdnDF = cdnDS
      .withColumn("respSize", expr("cast(respSize as int)"))
      // reqTime arrives like "[18/Sep/2013:06:49:18"; drop the leading '['.
      .withColumn("reqTime", substring($"reqTime", 2, 20))
      .withColumn("isMp4", $"url".endsWith(".mp4"))
      // First 11 chars are "dd/MMM/yyyy"; chars 13-14 are the hour.
      .withColumn("dt", to_date(substring($"reqTime", 1, 11), "dd/MMM/yyyy"))
      .withColumn("hour", substring($"reqTime", 13, 2))
      .withColumn("dayHour", expr("dt || hour"))

    extendCdnDF.show(3, false)

    // 2.1 Number of unique IPs across the whole log.
    val ipCount = extendCdnDF.dropDuplicates("ip").count
    println("unique IP count: " + ipCount)

    // 2.2 Unique IP count per video (a video is any URL ending in .mp4).
    // Note: countDistinct("ip"), not count() — a plain count would tally
    // log lines (requests), not distinct visitors.
    val mp4DF = extendCdnDF
      .filter("isMp4")
      .groupBy("url")
      .agg(countDistinct("ip").as("ipCount"))

    println("==================")
    println("unique IPs per video:")
    mp4DF.show(3, false)

    // 2.3 Total response bytes per day+hour bucket.
    val dayHourDF = extendCdnDF
      .groupBy("dayHour")
      .sum("respSize")

    println("==================")
    println("traffic per hour of day:")
    dayHourDF.show(3, false)
  }

}