package com.hdaccp.ch12

import org.apache.spark.sql.{SaveMode, SparkSession}

/**
 * Batch job that cleans raw web-server logs: reads plain-text log lines,
 * parses each line into a structured Row via [[MyLogCovertorUtil.parseLog]],
 * and writes the result as a single parquet file.
 *
 * Usage: LogCleanJob [inputPath] [outputPath]
 * When arguments are omitted the original hard-coded paths are used, so
 * existing zero-argument invocations keep working unchanged.
 */
object LogCleanJob {

  // Defaults preserved from the original hard-coded paths.
  private val DefaultInputPath  = "F:\\accp教学\\sparkresources\\log5"
  private val DefaultOutputPath = "F:\\accp教学\\sparkresources\\cleanlog5_1"

  def main(args: Array[String]): Unit = {
    // Optional CLI overrides; fall back to the historical defaults.
    val inputPath  = if (args.length > 0) args(0) else DefaultInputPath
    val outputPath = if (args.length > 1) args(1) else DefaultOutputPath

    // Local SparkSession with 2 worker threads (development/test setup).
    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("LogCleanJobApp")
      .getOrCreate()

    try {
      // Read the raw log file as an RDD[String], one element per line.
      val rdd = spark.sparkContext.textFile(inputPath)

      // Parse each line into a Row and attach the explicit schema.
      // Note: no `spark.implicits._` import is needed — this overload of
      // createDataFrame takes RDD[Row] + StructType, not an Encoder.
      val df = spark.createDataFrame(
        rdd.map(line => MyLogCovertorUtil.parseLog(line)),
        MyLogCovertorUtil.struct)

      // Collapse to one partition so the output directory contains a single
      // parquet part-file; overwrite any previous run's output.
      df.coalesce(1)
        .write
        .format("parquet")
        .mode(SaveMode.Overwrite)
        .save(outputPath)
    } finally {
      // Always release Spark resources, even if parsing or the write fails.
      spark.stop()
    }
  }
}
