package dataframe

import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}

/**
 * Example: read a JSON file into a DataFrame, filter rows where age > 20
 * (once with the Column DSL, once with a SQL expression string), print both
 * results, and save the filtered rows as CSV.
 *
 * Expects an input file at data/people.json (one JSON object per line,
 * the standard Spark JSON source format) with at least an "age" field.
 */
object DataFrame_JsonFileTest {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.set("spark.app.name", "DataFrame_JsonFileTest")
    conf.set("spark.master", "local[*]")
    conf.set("spark.executor.cores", "2")
    // FIX: the correct configuration key is "spark.executor.memory";
    // the previous "spark.executor.memories" is not a recognized Spark
    // setting and was silently ignored.
    conf.set("spark.executor.memory", "1g")
    conf.set("spark.locality.wait", "0")
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")

    val spark: SparkSession = SparkSession
      .builder()
//      .master("local[*]")
//      .appName("DataFrame_JsonFileTest")
      .config(conf)
      .getOrCreate()

    // spark.conf.set("spark.executor.cores","2")
    // spark.conf.set("spark.executor.memory","1g")

    // Needed for the $"col" Column interpolator used below.
    import spark.implicits._

    val df: DataFrame = spark
      .read
      .json("data/people.json")

    // Same filter expressed two equivalent ways: Column DSL vs. SQL string.
    val resultDF: Dataset[Row] = df.where($"age" > 20)
    val resultDF1: Dataset[Row] = df.where("age>20")
    resultDF.show()
    resultDF1.show()

    // Save the filtered result as CSV. Use overwrite mode so the example
    // can be re-run; the default SaveMode.ErrorIfExists would fail on the
    // second run because the "csvOutput" directory already exists.
    resultDF.write.mode("overwrite").format("csv").save("csvOutput")

    spark.stop()
  }
}
