package com.bw.sparksql1.job3

/**
  * Demonstrates saving data with Spark SQL's DataFrameWriter:
  * default parquet output via save(), explicit json/csv formats,
  * and write modes via SaveMode (e.g. SaveMode.Append).
  */
object Job9 {
    // NOTE(review): the entire body below is commented out, so this main is
    // currently a no-op. The disabled lines are kept as a reference for the
    // different DataFrameWriter save paths (default parquet, json, csv, SaveMode).
    def main(args: Array[String]): Unit = {
//      Logger.getLogger("org").setLevel(Level.ERROR)
//      val spark = SparkSession
//        .builder()
//        .master("local")
//        .appName("Spark SQL basic example")
//        .getOrCreate()
//      // Format 1: parquet
//      val df: DataFrame = spark.read.load("users.parquet")
//      df.write.save("hdfs://hadoop1/sparksql/save1") // save() defaults to parquet format
//
//
//      val df1 = spark.read.format("json").load("people.json")
//      df1.createOrReplaceTempView("people1")
//      spark.sql("select name from people1").write.format("json").save("test.json")
//
//
////      spark.sql("select name from people1").write.format("csv").save("test.json")
//      spark.sql("select name from people1").write.format("csv").save("test.csv")
//      df1.write.mode(SaveMode) // NOTE(review): would not compile if re-enabled —
//                               // mode() needs a SaveMode value (e.g. SaveMode.Append),
//                               // not the SaveMode companion, and no action is called
//
//      spark.sql("select name from people1").write.mode(SaveMode.Append).format("json").save("mode-test.json")
    }
}