package com.test.cn.spark.core

import java.text.SimpleDateFormat
import java.util.Locale

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Dataset, Row, SparkSession}

//Log columns: IP, hit/miss indicator, response time, request time, request method, request URL, request protocol, status code, response size, referer, user agent
//    //创建sql
//    val dataFrame: DataFrame = spark.sql(
//      """
//        |create or replace temporary view log_info
//        |using csv
//        |options(path "spark_scala_home_work/src/data/cdn.txt",
//        |       header "true",
//        |       inferschema "true",
//        |       delimiter " ")
//      """.stripMargin)
/** One parsed record of the CDN access log; every field is kept as a raw string.
  *
  * ip = client IP, hit = cache hit/miss indicator, respTime = response time,
  * reqTime = request timestamp (surrounding brackets stripped by the parser),
  * reqMethod = HTTP method (quotes stripped), url = requested URL,
  * protocol = request protocol (quotes stripped), httpStatus = status code,
  * size = response size, referer = HTTP referer, agent = user-agent string.
  */
case class LogInfo(ip: String, hit: String, respTime: String, reqTime: String, reqMethod: String, url: String, protocol: String, httpStatus: String, size: String, referer: String, agent: String)

//ip hit respTime reqTime reqMethod url protocol httpStatus size referer agent
object LogQuestion {
  /**
    * Parses a CDN access log into the `log_info` temp view and writes three
    * CSV reports: distinct IP count, distinct IPs per video, requests per hour.
    */
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession.builder()
      .appName("LogQuestion")
      .master("local[*]")
      .getOrCreate()

    // Needed for createDataset on an RDD of case classes.
    import spark.implicits._

    try {
      // Raw CDN access log, one space-separated record per line.
      val textFileRDD: RDD[String] = spark.sparkContext.textFile("spark_scala_home_work/src/data/cdn.txt")

      val infoRDD: RDD[LogInfo] = textFileRDD.map { line =>
        val fields: Array[String] = line.split(" ")
        // fields(3) and fields(4) together form the bracketed request timestamp,
        // e.g. "[18/Feb/2017:10:42:12" + "+0800]" -> "18/Feb/2017:10:42:12+0800".
        // (The original local was misleadingly named respTime; it is the reqTime field.)
        val reqTime: String = (fields(3) + fields(4)).replaceAll("\\[|\\]", "")
        LogInfo(fields(0), fields(1), fields(2), reqTime,
          fields(5).replace("\"", ""), fields(6), fields(7).replace("\"", ""),
          fields(8), fields(9), fields(10), fields(11))
      }

      // createOrReplaceTempView instead of createTempView so a re-run in the
      // same session does not fail with "view already exists".
      spark.createDataset(infoRDD).repartition(1).createOrReplaceTempView("log_info")

      // 1) Number of distinct client IPs.
      spark.sql(
        """
          |select count(distinct ip) as ip_count from log_info
        """.stripMargin)
        .repartition(1)
        .write
        .format("csv")
        .mode("overwrite")
        .save("spark_scala_home_work/src/data/result/ip")

      // 2) Distinct-IP count per video (a video request is any URL containing "mp4").
      // Extracts the file name from a URL: the part after the last '/' and
      // before any '?' query string.
      def getMp4(url: String): String = {
        if (url.contains("?")) {
          url.substring(url.lastIndexOf("/") + 1, url.indexOf("?"))
        } else {
          url.substring(url.lastIndexOf("/") + 1)
        }
      }

      spark.udf.register("getMp4", getMp4 _)
      spark.sql(
        """
          |select video, count(distinct ip) as ip_count
          | from
          |   (select getMp4(url) as video, ip from log_info where position('mp4', url) > 0) as t
          | group by video
        """.stripMargin)
        .repartition(1)
        .write
        .format("csv")
        .mode("overwrite")
        .save("spark_scala_home_work/src/data/result/mp4")

      // 3) Requests per hour.
      // NOTE(review): the task says "traffic per hour"; this counts requests
      // rather than summing the response size — confirm which is intended.
      // Parses "dd/MMM/yyyy:HH:mm:ss" to epoch milliseconds; the trailing
      // "+0800" zone offset is ignored because SimpleDateFormat.parse stops
      // once the pattern is consumed.
      def toDate1(str: String): Long = {
        val format = new SimpleDateFormat("dd/MMM/yyyy:HH:mm:ss", Locale.ENGLISH)
        format.parse(str).getTime
      }

      spark.udf.register("myToDate", toDate1 _)
      spark.sql(
        """
          |select info.hh, count(*) as requests from
          |(select from_unixtime(myToDate(reqTime)/1000, "yyyy/MM/dd-HH") as hh from log_info) as info
          |group by info.hh
        """.stripMargin)
        .repartition(1)
        .write
        .format("csv")
        .mode("overwrite")
        .save("spark_scala_home_work/src/data/result/hour")
    } finally {
      // Always release the local Spark resources, even if a query fails.
      spark.stop()
    }
  }
}
