package com.cyy.log.spark_extract.common

import org.apache.spark.sql.Row
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}

import scala.util.control.NonFatal

/**
  * Utilities for parsing raw access-log lines into Spark SQL rows
  * (programmatic schema) or case-class instances (reflective schema).
  *
  * @author Cyy
  * @since 2019-05-08
  */
object LogInfo {

  /**
    * Programmatic schema for the Rows produced by [[parseLog_program]]:
    * three string columns (ip, time, access_status) and two int columns
    * (code, traffic).
    */
  val struct: StructType = StructType(
    Array(
      StructField("ip", StringType),
      StructField("time", StringType),
      StructField("access_status", StringType),
      StructField("code", IntegerType),
      StructField("traffic", IntegerType)
    )
  )

  /**
    * Parses one space-separated access-log line into a [[Row]] matching
    * [[struct]].
    *
    * Field layout assumed (Apache/Nginx-style combined log — confirm against
    * the actual log producer): token 0 is the client IP; tokens 3 and 4,
    * rejoined with a space, form the timestamp handed to `DateUtils.parse`;
    * token 5 minus its leading character (the opening quote) is the access
    * status/method; token 8 is the numeric HTTP status code; token 9 is the
    * traffic byte count.
    *
    * @param log one raw log line
    * @return a 5-field Row; on any parse failure, a default Row
    *         ("", "", "", 0, 0) that still conforms to [[struct]]
    */
  def parseLog_program(log: String): Row = {
    try {
      val splits = log.split(" ")
      val ip = splits(0)
      val time = DateUtils.parse(splits(3) + " " + splits(4))
      val access_status = splits(5).substring(1)
      val code = splits(8).toInt
      val traffic = splits(9).toInt
      Row(ip, time, access_status, code, traffic)
    } catch {
      // NonFatal so truly fatal errors (OOM, interrupts) still propagate.
      case NonFatal(_) =>
        // BUG FIX: the original returned Row(0) — a single-field Row that
        // violates `struct`'s 5-column schema and makes
        // createDataFrame(rdd, struct) blow up at action time. Return a
        // schema-conforming default row instead, mirroring
        // parseLog_reflection's fallback entity.
        Row("", "", "", 0, 0)
    }
  }

  /**
    * Parses one space-separated access-log line into a [[LogInfoEntity]],
    * for use with Spark's reflection-based (case class) schema inference.
    *
    * Same token layout as [[parseLog_program]].
    *
    * @param log one raw log line
    * @return the parsed entity; on any parse failure, the default
    *         LogInfoEntity("", "", "", 0, 0)
    */
  def parseLog_reflection(log: String): LogInfoEntity = {
    try {
      val splits = log.split(" ")
      val ip = splits(0)
      val time = DateUtils.parse(splits(3) + " " + splits(4))
      val access_status = splits(5).substring(1)
      val code = splits(8).toInt
      val traffic = splits(9).toInt
      LogInfoEntity(ip, time, access_status, code, traffic)
    } catch {
      case NonFatal(_) =>
        LogInfoEntity("", "", "", 0, 0)
    }
  }

  /**
    * One parsed log record.
    *
    * NOTE(review): `code` and `traffic` are boxed `Integer` (not `Int`),
    * which makes the reflected Spark schema treat them as nullable; kept
    * as-is to preserve the existing schema.
    */
  case class LogInfoEntity(ip: String, time: String, access_status: String, code: Integer, traffic: Integer)

}
