package com.king.spark.sql

import org.apache.spark.SparkConf
import org.apache.spark.sql._


/**
 * Demo: reading data sources (Parquet, JSON, CSV) with the Spark SQL
 * `DataFrameReader` via a `SparkSession`.
 */
object SparkSQL07_read_write {
  def main(args: Array[String]): Unit = {
    // SparkSession is the unified entry point for Spark SQL.
    // appName/master could be set on the builder directly; here they come
    // from a SparkConf instead.
    val conf: SparkConf = new SparkConf()
      .setAppName("SparkSQL")
      .setMaster("local[*]")

    val spark: SparkSession = SparkSession
      .builder()
      .config(conf)
      .getOrCreate()

    // Imported by convention whether or not the implicits are used below.
    import spark.implicits._

    // Parquet: Spark's default columnar format, schema is self-describing.
    val usersParquet: DataFrame = spark.read.parquet("data/users.parquet")
    usersParquet.show()

    println("-----------------------")

    // JSON: schema inferred from the file contents.
    val userJson: DataFrame = spark.read.json("data/user.json")
    userJson.show()

    println("-----------------------")

    // CSV with default options (comma separator).
    val empCsv: DataFrame = spark.read.csv("data/emp.csv")
    empCsv.show()

    println("-----------------------")

    // CSV with a custom separator: this file is semicolon-delimited.
    val peopleCsv: DataFrame = spark.read.option("sep", ";").csv("data/people.csv")
    peopleCsv.show()

    spark.stop()
  }
}
