package com.study.spark.scala.nginx.util

import org.apache.spark.sql.Row
import org.apache.spark.sql.types._

import scala.util.control.NonFatal

/**
  *
  * @author: stephen.shen
  * @create: 2019-03-11 9:31
  */
object LogConvertUtil {

  /** Schema of the Rows produced by [[convert]]; field order must stay in sync with it. */
  val struct = StructType(
    Array(
      StructField("ip", StringType),
      StructField("province", StringType),
      StructField("date", StringType),
      StructField("time", StringType),
      StructField("url", StringType),
      StructField("statusCode", IntegerType),
      StructField("traffic", LongType),
      StructField("referer", StringType)
    )
  )

  /**
    * Parses one raw nginx access-log line into a [[Row]] matching [[struct]].
    *
    * Expected whitespace-split layout (see the commented sample in `main`):
    * ip at index 0, bracketed datetime across indices 3-4, url at 6,
    * status code at 8, byte count at 9, referer at 10.
    *
    * @param line one raw access-log line
    * @return a fully populated Row, or an empty `Row()` when the line cannot
    *         be parsed — callers can filter those out with `row.length == 0`
    */
  def convert(line: String): Row = {
    try {
      val splits = line.split(" ")
      val ip = splits(0)
      val datetime = DateUtils.parse(splits(3) + " " + splits(4))
      val date = DateUtils.getDate(datetime)
      // Bug fix: this previously called getDate again, so the "time" column
      // silently duplicated the "date" column.
      // NOTE(review): assumes DateUtils exposes getTime alongside getDate — confirm.
      val time = DateUtils.getTime(datetime)
      val url = splits(6)
      val code = splits(8)
      val bytes = splits(9)
      val referer = splits(10)

      // Field order here must match the schema declared in `struct` above.
      Row(
        ip,
        IPUtils.getProvince(ip),
        date,
        time,
        UrlUtils.replaceIndex(UrlUtils.clearParam(url)),
        code.toInt,
        bytes.toLong,
        UrlUtils.getDomain(referer.replaceAll("\"", ""))
      )
    } catch {
      // NonFatal instead of Throwable: malformed lines (missing fields, bad
      // numbers) are expected and yield the empty-Row sentinel, but fatal VM
      // errors (OutOfMemoryError, InterruptedException) must propagate.
      case NonFatal(_) =>
        Row()
    }
  }

  def main(args: Array[String]): Unit = {
//    val url = "45.33.15.6 - - [03/Aug/2017:00:32:48 +0800] \"\\x00\\x00\\x00\\x01\" 400 166 \"-\" \"-\" \"-\""
//    convert(url)

    // Sanity check: the empty-Row failure sentinel has length 0.
    println(Row().length==0)
  }
}
