package com.hdaccp.ch10

import com.ggstar.util.ip.IpHelper
import org.apache.spark.sql.Row
import org.apache.spark.sql.types.{LongType, StringType, StructField, StructType}

object MyLogConverterUtil {
  // Output schema. Every field is a String, so any Row produced by
  // parseLog — including the error fallback — must carry exactly seven
  // string values to stay schema-compatible.
  val struct = StructType(
    Array(
      StructField("ip",StringType),
      StructField("fangdate",StringType),
      StructField("url",StringType),
      StructField("coursetype",StringType),
      StructField("coursecode",StringType),
      StructField("waiurl",StringType),
      StructField("statuscode",StringType)
    )
  )

  /**
   * Parses one tab-separated access-log line into a [[Row]] matching [[struct]].
   *
   * Assumed layout (TODO confirm against the actual log format):
   * {{{ ip \t timestamp \t "METHOD /path ..." \t referer \t status }}}
   *
   * @param log one raw log line
   * @return a seven-field Row; on any parse failure a seven-field Row of
   *         empty strings. (Previously the fallback was `Row(0)` — a single
   *         Int field — which did not match the seven-String schema and
   *         broke DataFrame creation whenever a malformed line appeared.)
   */
  def parseLog(log: String): Row = {
    try {
      val splits = log.split("\t")

      // Resolve the client IP to a region name via the ip-lookup helper.
      val ip = IpHelper.findRegionByIp(splits(0))

      // "2017-05-11 14:09:14" -> "20170511" (day-granularity partition key).
      val fangdate = splits(1).substring(0, 10).replaceAll("-", "")

      // Request field looks like "GET /code/112.html HTTP/1.1";
      // take the path token, then split it on "/".
      val requestUrl = splits(2)
      val pathSegments = requestUrl.split(" ")(1).split("/")
      val courseType = pathSegments(0)

      // Second segment may carry a file extension (e.g. "112.html");
      // strip everything from the first "." to get the bare code.
      val rawCode = pathSegments(1)
      val courseCode =
        if (rawCode.indexOf(".") == -1) rawCode
        else rawCode.substring(0, rawCode.indexOf("."))

      val waiurl = splits(3)
      val statusCode = splits(4)

      // Positional order must match the fields declared in `struct` above.
      Row(ip, fangdate, requestUrl, courseType, courseCode, waiurl, statusCode)
    } catch {
      // NonFatal keeps the old "swallow bad lines" contract for ordinary
      // parse errors (ArrayIndexOutOfBounds, StringIndexOutOfBounds, ...)
      // while letting OutOfMemoryError / InterruptedException propagate.
      case scala.util.control.NonFatal(_) =>
        // Schema-compatible placeholder: seven empty strings.
        Row("", "", "", "", "", "", "")
    }
  }

}
