package com.hxk.analysis



import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession
import org.apache.spark.SparkConf
import org.slf4j.LoggerFactory
import org.apache.spark.sql.functions._

/**
  *
  --driver-java-options "-XX:+PrintGCDetails -XX:+PrintGCTimeStamps -DsleepDuration=10" \
或者
  --conf "spark.driver.extraJavaOptions=-XX:+PrintGCDetails -XX:+PrintGCTimeStamps -DsleepDuration=10" \

  // Each executor needs this jar present locally at the same path
--conf spark.executor.extraClassPath=/home/spark-course/spark-rdd-1.0-SNAPSHOT.jar \


--properties-file spark-wordcount.conf \
  *
  */
/*
spark-submit --class com.hxk.analysis.LogsAnalysis \
--name "LogsAnalysis" \
--master yarn \
--
 */
object LogsAnalysis {

  private val logger = LoggerFactory.getLogger("LogsAnalysis")

  /**
    * Entry point: loads an Apache access log from a local path, parses each
    * line into an [[ApacheAccessLog]], and prints four summary reports via
    * `show()`:
    *   1. avg / max / min of contentSize
    *   2. count per responseCode
    *   3. client IPs seen more than 10 times
    *   4. top-10 most-requested endpoints
    *
    * @param args unused; input path is currently hard-coded for local runs
    */
  def main(args: Array[String]): Unit = {

    //val sleepDuration = System.getProperty("sleepDuration","20").toInt
    //logger.info(s"sleepDuration ===== ${sleepDuration}")

    /**
      * --files /home/hadoop-twq/spark-course/wordcount.properties \
      * Loads job parameters from a file (unclear whether this lands on the
      * executor or the driver — confirm before relying on it).
      * The file is uploaded to /home/hadoop-twq/spark-course.
      * Multiple files may be listed, separated by commas.
      */
    // Alternative: load job parameters from a properties file on the classpath.
//    val configFileStream = this.getClass.getClassLoader.getResourceAsStream("wordcount.properties")
//    val properties = new Properties()
//    properties.load(configFileStream)
//
//
//    val sleepDurations = properties.getProperty("sleepDuration").toInt
//    logger.info(s"sleepDuration ===== ${sleepDurations}")
//    TimeUnit.SECONDS.sleep(sleepDurations)

/*
    // Alternative: read input path from SparkConf and fall back to local mode
    // when the key is absent (i.e. when not launched via spark-submit).
    val basePathKey = "spark.timeMerge.dataInputPath"
    val config = new SparkConf()
    config.setAppName("DeviceDataETL")
    val isLocal = !config.contains(basePathKey)

    val basePath = if (isLocal) {
      config.setMaster("local")
      "logdata/apache.access.txt"
    } else config.get(basePathKey)

    val spark = SparkSession.builder()
      .config(config)
      .getOrCreate()
*/
    val spark = SparkSession.builder()
      .appName("LogsAnalysis")
      .master("local")
      .getOrCreate()

/*    val conf = new SparkConf()
    //conf.setAppName("LogsAnalysis")
    if(!conf.contains("spark.master")) {
      conf.setMaster("local")
    }
    val sc = new SparkContext(conf)

    val dataInputPath = conf.get("spark.timeMerge.dataInputPath","logdata/apache.access.txt")
    val baseOutputPath = conf.get("spark.timeMerge.baseOutputPath","logdata/output")*/

    //val rawRDD: RDD[String] = spark.sparkContext.textFile("hdfs://master:9999/user/hadoop-twq/accessLog/apache.access.txt")
    val rawRDD: RDD[String] = spark.sparkContext.textFile("logdata/apache.access.txt")

    // flatMap drops lines that parseLogLine cannot parse (it returns an
    // empty collection for them — see ApacheAccessLog).
    val parsedRDD: RDD[ApacheAccessLog] = rawRDD.flatMap(ApacheAccessLog.parseLogLine)

    val logDf = spark.createDataFrame(parsedRDD)
    // Four independent actions consume logDf below; cache it so the log
    // file is read and parsed only once instead of once per action.
    logDf.cache()
    /*
Sample of logDf.show(false):
+------------+------------+------+--------------------------+------+-----------------------------+--------+------------+-----------+
|ipAddress   |clientIdentd|userId|dateTime                  |method|endpoint                     |protocol|responseCode|contentSize|
+------------+------------+------+--------------------------+------+-----------------------------+--------+------------+-----------+
|64.242.88.10|-           |-     |07/Mar/2004:16:05:49 -0800|GET   |/twiki/bin/edit/Main/...     |HTTP/1.1|401         |12846      |
|64.242.88.10|-           |-     |07/Mar/2004:16:06:51 -0800|GET   |/twiki/bin/rdiff/TWiki/...   |HTTP/1.1|200         |4523       |
+------------+------------+------+--------------------------+------+-----------------------------+--------+------------+-----------+
only showing top 20 rows
    */

    // 1. Content-size statistics over all requests: average, max and min.
    logDf.agg(
      avg("contentSize").as("avg_size"),
      max("contentSize").as("max_size"),
      min("contentSize").as("min_size")
    ).show()
/*
+-----------------+--------+--------+
|         avg_size|max_size|min_size|
+-----------------+--------+--------+
|7775.963726884779|  138789|       0|
+-----------------+--------+--------+
 */

    // 2. Total occurrences of each responseCode:
    //    group by responseCode, then count.
    logDf.select("responseCode").groupBy("responseCode").count().show()
/*
+------------+-----+
|responseCode|count|
+------------+-----+
|         404|    5|
|         200| 1272|
|         302|    6|
|         401|  123|
+------------+-----+
 */

    // 3. ipAddresses that appear more than 10 times:
    //    group by ipAddress, count, then keep rows with count > 10.
    logDf.select("ipAddress").groupBy("ipAddress").count().filter("count > 10").show(100, false)
/*
+---------------------------------+-----+
|ipAddress                        |count|
+---------------------------------+-----+
|cr020r01-3.sac.overture.com      |44   |
|10.0.0.153                       |188  |
|64.242.88.10                     |452  |
+---------------------------------+-----+
(25 rows in total; abbreviated here)
 */

    // 4. Top-10 endpoints by request count.
    //    NOTE(review): endpoints keep their query strings, so e.g.
    //    /page?rev=1.25 and /page count separately — strip the query part
    //    first if path-level counts are wanted.
    import spark.implicits._
    logDf.select($"endpoint").groupBy($"endpoint").count().orderBy($"count".desc).limit(10).show(false)
/*
+-----------------------------------------------+-----+
|endpoint                                       |count|
+-----------------------------------------------+-----+
|/twiki/bin/view/Main/WebHome                   |40   |
|/twiki/pub/TWiki/TWikiLogos/twikiRobot46x50.gif|32   |
|/                                              |31   |
|/favicon.ico                                   |28   |
|/robots.txt                                    |27   |
+-----------------------------------------------+-----+
(top 5 of 10 shown)
 */

    spark.stop()
    // Fixed typo: was "EDN LogsAnalysis".
    logger.info("END LogsAnalysis")
  }

}

