package com.hngy.scala.sql

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

/**
  * Demo: using DataFrame `load` and `save` (read JSON, write selected columns as CSV).
  */
object LoadAndSaveOpScala {

  /**
    * Reads student records from a JSON file and writes the "name" and "age"
    * columns back out in CSV format.
    *
    * @param args optional overrides: args(0) = input JSON path,
    *             args(1) = output CSV directory. When absent, the original
    *             hard-coded local paths are used, so existing invocations
    *             keep working unchanged.
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setMaster("local")
    val sparkSession = SparkSession.builder()
      .appName("LoadAndSaveOpScala")
      .config(conf)
      .getOrCreate()

    // Allow the paths to be supplied on the command line; fall back to the
    // original hard-coded locations for backward compatibility.
    val inputPath =
      if (args.length > 0) args(0)
      else "F:\\BaiduNetdiskDownload\\hadoop\\source\\bigdata_course_materials\\spark2\\student.json"
    val outputPath =
      if (args.length > 1) args(1)
      else "D:\\cache\\bigdata\\spark2\\loadAndSaveOpScala"

    // Load the JSON data as a DataFrame.
    val stuDf = sparkSession.read.format("json").load(inputPath)

    // Save the selected columns as CSV.
    // NOTE: the default SaveMode is ErrorIfExists, so this throws if the
    // output directory already exists; delete it (or pass a fresh path)
    // before re-running.
    stuDf.select("name", "age").write.format("csv").save(outputPath)
    //stuDf.select("name","age").write.format("csv").save("hdfs://hadoop001:9001/out-save001")

    sparkSession.stop()
  }
}
