package com.log.anal

import org.apache.spark.sql.SparkSession

/**
 * Reformats a raw client log into tab-separated records.
 *
 * Each input line is split on single spaces; fields 0+1 form the timestamp
 * (milliseconds dropped), field 3 is the log level, field 5 the Python file
 * name, field 7 the message content. The timestamp is normalized via
 * [[DateUtils.parse]] and the result is written back out as text.
 */
object SparkStatFormatJob {

  def main(args: Array[String]): Unit = {
    // Paths are hard-coded for this local demo job; named here so they are
    // easy to spot and change.
    val inputPath  = "file:///Users/changqingai/workspace_code/learn/spark-learn/datasets/client.log"
    val outputPath = "file:///Users/changqingai/workspace_code/learn/spark-learn/datasets/tmp/log1/"

    val spark = SparkSession.builder()
      .appName("SparkStatFormatJob")
      .master("local[2]")
      .getOrCreate()

    val logs = spark.sparkContext.textFile(inputPath)
    // Peek at the first few lines for a sanity check of the input format.
    logs.take(10).foreach(println)

    val logRdd = logs
      // Split once per line and drop malformed rows up front: the extractor
      // below reads up to index 7, so any shorter line would otherwise throw
      // ArrayIndexOutOfBoundsException and abort the whole job.
      .map(_.split(" "))
      .filter(_.length > 7)
      .map { splits =>
        // Drop the millisecond info: date + time are the first two fields.
        val time = splits(0) + " " + splits(1)
        val infoLevel = splits(3)
        val pyFileName = splits(5)
        // NOTE(review): content takes only the single field at index 7;
        // messages containing spaces are truncated — confirm against the
        // actual log format if full messages are needed.
        val content = splits(7)
        s"${DateUtils.parse(time)}\t$infoLevel\t$pyFileName\t$content"
      }
    // saveAsTextFile fails if outputPath already exists — clear it between runs.
    logRdd.saveAsTextFile(outputPath)

    spark.stop()
  }
}
