package com.log.anal.log

import org.apache.parquet.format.IntType
import org.apache.spark.sql.Row
import org.apache.spark.sql.execution.columnar.STRUCT
import org.apache.spark.sql.types.{LongType, StringType, StructField, StructType}

import scala.util.control.NonFatal

object AccessConvertUtils {

  /**
   * Output schema for parsed log rows. All seven fields are strings;
   * the field order must match the `Row` built in [[parseLog]].
   */
  val struct = StructType(
    Array(
      StructField("time", StringType),
      StructField("infoLevel", StringType),
      StructField("cat", StringType),
      StructField("pyFileName", StringType),
      StructField("name", StringType),
      StructField("content", StringType),
      StructField("day", StringType)
    )
  )

  /**
   * Parses one tab-separated log line into a [[Row]] matching [[struct]].
   *
   * Expected layout: `time \t infoLevel \t pyFileName \t content`
   * (at least four tab-separated fields; extras are ignored).
   *
   * Derived fields:
   *  - `cat`:  "0" for DEBUG, "1" for INFO, "2" for anything else.
   *  - `name`: `pyFileName` with its trailing ".py" extension removed.
   *  - `day`:  first 10 chars of `time` with "-"/":"/" " stripped,
   *            i.e. "2023-01-15 ..." becomes "20230115".
   *
   * @param log a raw log line
   * @return a 7-field Row on success; the length-1 sentinel `Row(0)` when
   *         the line is malformed — callers must filter out length-1 rows
   *         before applying [[struct]] to the data.
   */
  def parseLog(log: String): Row = {
    try {
      val splits = log.split("\t")
      val time = splits(0)
      val infoLevel = splits(1)
      val pyFileName = splits(2)
      val content = splits(3)

      // Strip the ".py" extension as a literal suffix. The previous
      // split(".py") interpreted the argument as a REGEX, where '.'
      // matches any character — a name like "apy.py" would split at
      // index 0 and produce an empty script name.
      val name = pyFileName.stripSuffix(".py")

      // Map the log level to a numeric category code (kept as a string).
      val cat = infoLevel match {
        case "DEBUG" => "0"
        case "INFO"  => "1"
        case _       => "2"
      }

      // "yyyy-MM-dd ..." -> "yyyyMMdd"; the ":"/" " replacements are
      // defensive in case the 10-char prefix contains a time component.
      val day = time.substring(0, 10).replace("-", "")
        .replace(":", "").replace(" ", "")

      Row(time, infoLevel, cat, pyFileName, name, content, day)
    } catch {
      // NonFatal so OutOfMemoryError / InterruptedException still
      // propagate instead of being swallowed as a bad-line sentinel.
      case NonFatal(_) =>
        // Length-1 sentinel preserved for compatibility: downstream code
        // is expected to drop rows where row.length != struct.size.
        Row(0)
    }
  }
}
