import java.text.SimpleDateFormat

import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD

/**
  * Created by wuyunpeng on 2016/4/12.
  */
object NginxLogsEtl {

  /** Three-letter English month abbreviation (nginx `time_local` style) -> zero-padded month number, "Jan" -> "01" ... "Dec" -> "12". */
  val monthMap: Map[String, String] =
    Seq("Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec")
      .zipWithIndex
      .map { case (name, idx) => name -> "%02d".format(idx + 1) }
      .toMap

  /**
    * Loads nginx access logs from `path` and maps each line to a CSV record
    * `"<udid>,<query-params joined by ','>,<epoch millis>"`.
    *
    * Lines whose request field carries no query string (`?`) are mapped to the
    * empty string, matching the original behavior (callers are expected to
    * filter those out downstream).
    *
    * Assumes the space-split log line has the time in field 3 (bracketed) and
    * the request path in field 6 — the stock nginx combined format; lines with
    * fewer fields will fail the task, as before.
    *
    * @param path input path understood by `SparkContext.textFile`
    * @param sc   active Spark context
    * @return RDD of CSV records (empty string for lines without a query string)
    */
  def etl(path:String,sc:SparkContext) ={

    val nginxRdd = sc.textFile(path).map{
      log => {
        val arr = log.split(" ")
        // arr(3) is "[dd/MMM/yyyy:HH:mm:ss" — drop the leading '['.
        val time = arr(3).substring(1)
        // Request path, e.g. "/track?udid=abc&ch=web".
        val paramsStr = arr(6)

        if (paramsStr.contains("?")) {
          val params = paramsStr.split("""\?""")(1).split("""&""")
          // Last "udid..."-prefixed parameter wins, preserving the semantics of
          // the original accumulation loop. split with limit 2 keeps a trailing
          // empty value, so a valueless "udid"/"udid=" token yields "" instead
          // of throwing ArrayIndexOutOfBoundsException as the old code did.
          val udid = params
            .filter(_.startsWith("udid"))
            .flatMap(_.split("=", 2).lift(1))
            .lastOption
            .getOrElse("")
          udid + "," + params.mkString(",") + "," + convertToTimeStamp(time, monthMap)
        } else {
          ""
        }
      }
    }
    nginxRdd
  }

  /**
    * Converts an nginx timestamp of the form "dd/MMM/yyyy:HH:mm:ss"
    * (e.g. "12/Apr/2016:08:30:15") to epoch milliseconds, interpreted in the
    * JVM's default timezone, with the hour shifted forward by 7.
    *
    * NOTE(review): the +7 hour shift is kept from the original code —
    * presumably a fixed timezone correction for the log source; confirm.
    *
    * @param dateOfNginx timestamp portion of the log line, without the leading '['
    * @param monthMap    maps "Jan".."Dec" to "01".."12" (see [[monthMap]])
    * @return epoch milliseconds
    * @throws java.util.NoSuchElementException if the month abbreviation is unknown
    */
  def convertToTimeStamp(dateOfNginx:String,monthMap:Map[String,String]): Long ={

    val dateArr = dateOfNginx.split("/")   // [day, monthName, "yyyy:HH:mm:ss"]
    val timeArr = dateArr(2).split(":")    // [year, hour, minute, second]
    val month = monthMap(dateArr(1))       // direct apply instead of .get(...).get
    // BUG FIX: the hour must come from timeArr(1); the old code added 7 to the
    // MONTH number instead, discarding the log line's actual hour entirely.
    // SimpleDateFormat is lenient by default, so an hour >= 24 rolls over into
    // the following day rather than failing to parse.
    val h = timeArr(1).toInt + 7
    val newTime = timeArr(0) + "/" + month + "/" + dateArr(0) + " " + h + ":" + timeArr(2) + ":" + timeArr(3)
    val date = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss").parse(newTime)
    date.getTime
  }
}
