package com.jianghang.class_three.log_format.APP

import java.util.Locale

import org.apache.commons.lang3.time.FastDateFormat
import org.apache.spark.sql.SparkSession

/**
  * Parsed log format:
  *   ip country province city time method url protocol http_status bytes_sent referer
  *   user_agent browser_name browser_version engine_name engine_version os_name platform_name is_mobile
  */
object LogFormat {

  /** Input pattern of the raw access-log time field, e.g. "30/Jan/2019:00:00:21 +0800".
    * FastDateFormat is thread-safe, so a single shared instance is fine inside Spark tasks.
    */
  private val RawTimeFormat =
    FastDateFormat.getInstance("dd/MMM/yyyy:HH:mm:ss Z", Locale.ENGLISH)

  /** Output pattern for the normalized timestamp, e.g. "201901300000". */
  private val CompactTimeFormat = FastDateFormat.getInstance("yyyyMMddHHmm")

  /**
    * Converts a bracketed access-log timestamp such as
    * "[30/Jan/2019:00:00:21 +0800]" into the compact form "yyyyMMddHHmm".
    *
    * @param time raw time field including the surrounding square brackets
    * @return the reformatted timestamp string
    */
  private[APP] def parseTime(time: String): String = {
    val beginIndex = time.indexOf("[") + 1
    // BUG FIX: the original used lastIndexOf("["), which for a well-formed
    // field equals beginIndex - 1 and yields an empty/invalid substring.
    // The closing delimiter is "]".
    val endIndex = time.lastIndexOf("]")
    val time_str = time.substring(beginIndex, endIndex)
    val time_long = RawTimeFormat.parse(time_str).getTime
    CompactTimeFormat.format(time_long)
  }

  def main(args: Array[String]): Unit = {
    val session = SparkSession
      .builder()
      .master("local[2]")
      .appName("LogFormat")
      .getOrCreate()

    try {
      // Read the raw log data.
      // TODO: the input path is still empty — supply the real location.
      val rdd_log = session.sparkContext.textFile("")

      // Transform each raw log line into the target format.
      // NOTE(review): currently an identity map — the real parsing is not
      // implemented yet; kept as-is to preserve the original placeholder.
      rdd_log.map(x => x)

//      val df = session.read.format("json").option("path", "").load()

      import org.apache.spark.sql.functions._

      // UDF wrapper around parseTime for use on a DataFrame column.
      def formatTime() = udf((time: String) => parseTime(time))

//      // Apply the UDF to the "time" column of df:
//      df.withColumn("formattime", formatTime()(df("time")))
    } finally {
      // Release the SparkSession (the original leaked it).
      session.stop()
    }
  }

}
