package com.zyh.day05.loadsave

import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

object ParquetTest {
  /** Reads a Parquet dataset with a local SparkSession and prints its contents.
    *
    * @param args optional: args(0) overrides the default input path
    *             ("file:///D:/data/parquet").
    */
  def main(args: Array[String]): Unit = {
    // Allow the input path to come from the command line; keep the original
    // hard-coded location as the backward-compatible default.
    val inputPath: String = args.headOption.getOrElse("file:///D:/data/parquet")

    val spark: SparkSession = SparkSession.builder()
      .master("local[*]")
      .appName("pt")
      .getOrCreate()

    try {
      // Example kept for reference: how the dataset above was produced
      // (write a DataFrame as Parquet, overwriting any existing output).
      // val list = List((1, "xiao1hei", 18), (2, "xiao2hei", 20), (3, "xiao3hei", 22), (4, "xiao4hei", 23))
      // import spark.implicits._
      // val df: DataFrame = list.toDF("id", "name", "age")
      // df.write.mode(SaveMode.Overwrite).parquet("file:///D:/data/parquet")

      val df: DataFrame = spark.read.parquet(inputPath)
      df.show()
    } finally {
      // Always release the session (and the local SparkContext), even when
      // the read fails — previously a failed read leaked the session.
      spark.stop()
    }
  }
}
