package com.me.bigdata

import java.net.URLDecoder

import scala.util.control.NonFatal

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Parses an nginx access log, counts hits per API endpoint, and prints the
  * counts in descending order.
  * Expected log line format:
  * 10.0.0.164 - - [30/Oct/2017:10:11:21 +0800] "POST /talk-mobile/api/userHasRemind HTTP/1.1" 200 61 "-" "iuReader/2.2.3 (iPhone; iOS 10.3.3; Scale/2.00)"
  * 10.0.0.164 - - [30/Oct/2017:10:11:21 +0800] "POST /api-mobile/api/userSysMessageHasRemind HTTP/1.1" 200 61 "-" "iuReader/2.2.3 (iPhone; iOS 10.3.3; Scale/2.00)"
  * 10.0.0.164 - - [30/Oct/2017:10:11:21 +0800] "POST /api-mobile/api/userHasRemind HTTP/1.1" 200 60 "-" "iuReader/2.2.3 (iPhone; iOS 10.3.3; Scale/2.00)"
  * 10.0.0.164 - - [30/Oct/2017:10:11:21 +0800] "POST /api-mobile/api/userHasRemind HTTP/1.1" 200 61 "-" "iuReader/2.2.3 (iPhone; iOS 10.3.3; Scale/2.00)"
  */
object ScalaParseLog {

  /**
    * Entry point: reads the log file at args(0), keeps GET/POST request lines
    * hitting /talk-mobile/api/, aggregates hit counts per "METHOD /path" key,
    * and prints the result sorted by count descending.
    *
    * @param args args(0) = input log path (args(1), if used, = output path)
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName("ScalaParseLog")
    val sc = new SparkContext(conf)

    try {
      val file: RDD[String] = sc.textFile(args(0))
      // The client IP always occupies index 0 of a well-formed line, so any
      // real "GET"/"POST"/path token must sit at an index > 0.
      val result = file
        .filter(line => line.indexOf("/talk-mobile/api/") > 0 &&
                        line.indexOf("HTTP/") > 0 &&
                        (line.indexOf("GET") > 0 || line.indexOf("POST") > 0))
        .map(handle)
        .reduceByKey(_ + _)
        .sortBy(_._2, ascending = false)

      println(result.collect.toBuffer)
      //result.saveAsTextFile(args(1))
    } finally {
      // Fix: always release Spark resources, even when the job fails.
      sc.stop()
    }
  }

  /**
    * Turns one raw log line into a ("METHOD /path", 1) pair for counting.
    * The line is URL-decoded (gb2312) first so percent-encoded query strings
    * decode consistently.
    *
    * @return ("exception", 1) when the line cannot be decoded or sliced, so a
    *         single malformed line never aborts the whole job
    */
  def handle(line: String): (String, Int) = {
    try {
      val decodeLine = URLDecoder.decode(line, "gb2312")
      if (decodeLine.indexOf("GET") > 0) substr(decodeLine, "GET")
      else substr(decodeLine, "POST")
    } catch {
      // Fix: catch only non-fatal throwables so OutOfMemoryError /
      // InterruptedException still propagate (was: case e: Exception).
      case NonFatal(_) => ("exception", 1)
    }
  }

  /**
    * Slices "METHOD /path" out of a decoded log line, dropping any query
    * string and the trailing " HTTP/x.y" protocol marker.
    *
    * @param line   decoded log line containing `method` and "HTTP/"
    * @param method request method token to anchor on ("GET" or "POST")
    */
  def substr(line: String, method: String): (String, Int) = {
    val q = line.indexOf("?")
    // Cut at '?' only when it belongs to the request URL, i.e. it appears
    // before the "HTTP/" marker; otherwise cut at "HTTP/" itself.
    val end = if (q > 0 && q < line.indexOf("HTTP/")) q else line.indexOf("HTTP/")
    (line.substring(line.indexOf(method), end).trim, 1)
  }
}
