package com.li.sparksql

import org.apache.spark.SparkConf
import org.apache.spark.sql.{SaveMode, SparkSession}

object LoadAndSaveOp {

  /**
   * Example job: load a JSON file and append a two-column projection
   * (`name`, `age`) of it to a CSV output directory.
   *
   * @param args optional overrides — args(0) = input JSON path,
   *             args(1) = output directory. Defaults preserve the
   *             original hard-coded local paths, so existing
   *             invocations keep working unchanged.
   */
  def main(args: Array[String]): Unit = {
    val inputPath  = if (args.length > 0) args(0) else "/Users/lijiacen/Downloads/chealse.json"
    val outputPath = if (args.length > 1) args(1) else "/Users/lijiacen/Downloads/chealse"

    val spark: SparkSession = getSparkSession
    try {
      // Load: schema is inferred from the JSON source.
      val dataDf = spark.read.format("json").load(inputPath)

      // Save: project the two columns and write them out as CSV,
      // appending to any output already present at the target path.
      dataDf.select("name", "age")
        .write
        .format("csv")
        .mode(SaveMode.Append)
        .save(outputPath)
    } finally {
      // Always release the session (and its local cluster resources),
      // even if the read or write above throws.
      spark.stop()
    }
  }

  /** Builds a local-mode SparkSession for this example job. */
  private def getSparkSession: SparkSession = {
    val conf = new SparkConf().setMaster("local")
    SparkSession.builder()
      .appName("LoadAndSaveOp")
      .config(conf)
      .getOrCreate()
  }
}
