package com.hdaccp.ch11

import org.apache.spark.sql.{SaveMode, SparkSession}

object MyLogCleanJob {

  // Original hard-coded locations, kept as defaults for backward compatibility.
  private val DefaultInputPath  = "F:\\accp教学\\sparkresources\\log5"
  private val DefaultOutputPath = "F:\\accp教学\\sparkresources\\cleanlog5"

  /**
   * Log-cleaning batch job: reads raw log lines from a text file, parses each
   * line into a structured row via `MyLogCovertorUtil.parseLog`, and writes the
   * result as a single Parquet file.
   *
   * @param args optional overrides: args(0) = input path, args(1) = output path;
   *             when absent, the original hard-coded paths are used.
   */
  def main(args: Array[String]): Unit = {
    val inputPath  = args.lift(0).getOrElse(DefaultInputPath)
    val outputPath = args.lift(1).getOrElse(DefaultOutputPath)

    // 1. Build the SparkSession (local mode, 2 threads).
    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("CleanJobApp")
      .getOrCreate()

    try {
      // 2. Read the raw log file as an RDD[String], one element per line.
      val rdd = spark.sparkContext.textFile(inputPath)

      // RDD[String] => DataFrame, using the row parser and the explicit schema
      // provided by MyLogCovertorUtil (no implicit Encoders needed).
      val df = spark.createDataFrame(
        rdd.map(line => MyLogCovertorUtil.parseLog(line)),
        MyLogCovertorUtil.struct
      )

      // DataFrame => Parquet. coalesce(1) produces a single output file;
      // Overwrite replaces any previous run's output directory.
      df.coalesce(1)
        .write
        .format("parquet")
        .mode(SaveMode.Overwrite)
        .save(outputPath)
      // df.show(false)  // uncomment for quick inspection while debugging
    } finally {
      // Always release Spark resources, even if parsing or the write fails.
      spark.stop()
    }
  }
}
