package com.imooc.log

import com.imooc.log.util.DateUtils
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SparkSession

/**
  * Created by zghgchao 2017/12/24 21:41
  * Cleaning step 1: extract only the specific columns of data that we need
  * from the raw access log.
  */
object SparkStatFormatJob {

  /**
    * Cleaning step 1: read the raw nginx-style access log (space-separated
    * fields) and emit tab-separated records of the form
    * {{{ parsedTime \t url \t traffic \t ip }}}
    *
    * @param args optional overrides: args(0) = input path, args(1) = output
    *             path. Defaults preserve the original hard-coded locations.
    *             Note: saveAsTextFile fails if the output directory already
    *             exists (standard Hadoop output-committer behavior).
    */
  def main(args: Array[String]): Unit = {
    SetLogger()

    // Paths are overridable from the command line; defaults keep the
    // original behavior for existing callers/scripts.
    val inputPath  = args.lift(0).getOrElse("src/data/10000_access.log")
    val outputPath = args.lift(1).getOrElse("src/data/output")

    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("SparkStatFormatJob")
      .getOrCreate()

    val accessRDD = spark.sparkContext.textFile(inputPath)

    // Sample raw line for reference:
    // 183.162.52.7 - - [10/Nov/2016:00:01:02 +0800] "POST /api3/userdynamic HTTP/1.1" 200 19501 "www.imooc.com" "-" cid=0&timestamp=... "mukewang/5.0.0 (...)" "-" 10.100.136.65:80 200 0.195 0.195

    accessRDD.map(formatLine).saveAsTextFile(outputPath)

    spark.stop()
  }

  /**
    * Parses a single raw log line into the cleaned tab-separated form
    * "parsedTime \t url \t traffic \t ip".
    *
    * NOTE(review): assumes every line has at least 12 space-separated
    * fields; a malformed line would throw ArrayIndexOutOfBoundsException
    * and fail the task — confirm upstream data is well-formed.
    */
  private def formatLine(line: String): String = {
    val splits = line.split(" ")
    val ip = splits(0)
    // Fields 3 and 4 of the raw log joined together form the complete
    // access timestamp: [10/Nov/2016:00:01:02 +0800]
    val time = splits(3) + " " + splits(4)
    // Field 11 is the quoted site host, e.g. "www.imooc.com" ==> www.imooc.com
    val url = splits(11).replace("\"", "")
    // Field 9 is the response size (traffic)
    val traffic = splits(9)
    DateUtils.parse(time) + "\t" + url + "\t" + traffic + "\t" + ip
  }

  /**
    * Silences Spark / third-party console logging so job output stays
    * readable. (Name kept UpperCamelCase for backward compatibility with
    * existing callers; conventional Scala would be `setLogger`.)
    */
  def SetLogger(): Unit = {
    Logger.getLogger("org").setLevel(Level.OFF)
    Logger.getLogger("com").setLevel(Level.OFF)
    System.setProperty("spark.ui.showConsoleProgress", "false")
    Logger.getRootLogger().setLevel(Level.OFF)
  }
}
