package com.hdaccp.ch11
import com.ggstar.util.ip.IpHelper
import org.apache.spark.sql.Row
import org.apache.spark.sql.types.{StringType, StructField, StructType}

object MyLogCovertorUtil {

  /** Output schema for converted log rows: every column is a string. */
  val struct = StructType(
    Array(
      StructField("ip", StringType),
      StructField("fangdate", StringType),
      StructField("url", StringType),
      StructField("coursetype", StringType),
      StructField("coursecode", StringType),
      StructField("waiurl", StringType),
      StructField("statuscode", StringType)
    )
  )

  // Example request fields carried in the log line:
  //   "GET class/130.html HTTP/1.0" -> course type "class",  course code "130"
  //   "GET course/list HTTP/1.0"    -> course type "course", course code "list"

  /**
   * Parses one tab-separated access-log line into a [[Row]] matching `struct`.
   *
   * Expected tab-separated layout (5 fields):
   *   ip \t datetime \t request \t referer-url \t status-code
   *
   * @param log a single raw log line
   * @return Row(region, yyyyMMdd day, raw request, course type, course code,
   *         referer url, status code)
   * @throws ArrayIndexOutOfBoundsException if the line has fewer fields than expected
   * @throws StringIndexOutOfBoundsException if the datetime field is shorter than 10 chars
   */
  def parseLog(log: String): Row = {
    val fields = log.split("\t")

    // Resolve the raw client IP to a region name via the IP lookup helper.
    val region = IpHelper.findRegionByIp(fields(0))

    // e.g. "2017-05-11 14:09:14" -> "20170511": keep the date part, strip dashes.
    val day = fields(1).substring(0, 10).replaceAll("-", "")

    val request    = fields(2)
    val refererUrl = fields(3)
    val statusCode = fields(4)

    // The request is "<METHOD> <path> <protocol>"; token 1 is the path,
    // whose first two "/"-separated segments are the course type and code.
    val pathParts  = request.split(" ")(1).split("/")
    val courseType = pathParts(0)
    // Drop any file extension: "130.html" -> "130"; "list" stays "list".
    val courseCode = pathParts(1).takeWhile(_ != '.')

    Row(region, day, request, courseType, courseCode, refererUrl, statusCode)
  }
}
