package com.hdaccp.ch09

import org.apache.spark.sql.{SaveMode, SparkSession}

/**
  * Data cleaning job: converts a tab-separated music text file into a
  * Parquet file (DataFrame reads/writes Parquet by default).
  *
  * Usage: MusicClean [inputPath] [outputPath]
  * When no arguments are given, falls back to the original hard-coded paths.
  */
object MusicClean {
  def main(args: Array[String]): Unit = {
    // Paths can be overridden on the command line; defaults preserve the
    // original hard-coded behavior (backward compatible).
    val inputPath  = args.lift(0).getOrElse("F:\\accp教学\\sparkresources\\music1.txt")
    val outputPath = args.lift(1).getOrElse("F:\\accp教学\\sparkresources\\clean13")

    // 1. Build the SparkSession (local mode, 2 threads).
    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("Ch09MusicCleanApp") // fixed typo: was "Ch09usicCleanApp"
      .getOrCreate()

    // 2. Bring implicit conversions (e.g. rdd.toDF()) into scope.
    import spark.implicits._

    // 3. Load the raw text file as an RDD of lines.
    val rdd = spark.sparkContext.textFile(inputPath)

    // 4. RDD -> DataFrame: split each line on tabs and map to Music rows.
    //    Malformed rows with fewer than 6 fields are skipped; the original
    //    code would throw ArrayIndexOutOfBoundsException on them.
    //    NOTE(review): Music is a case class defined elsewhere in the
    //    project; assumed to take 6 String fields as the original call shows.
    val df = rdd
      .map(_.split("\t"))
      .filter(_.length >= 6)
      .map(f => Music(f(0), f(1), f(2), f(3), f(4), f(5)))
      .toDF()

    // 5. Write the DataFrame as a single Parquet file, replacing any
    //    previous output at the target path.
    df.coalesce(1).write.format("parquet").mode(SaveMode.Overwrite).save(outputPath)

    spark.stop()
  }
}
