import org.apache.spark.sql.SparkSession

object E2_DFTest {

  def main(args: Array[String]): Unit = {
    testSource()
  }

  /**
   * Demonstrates basic Spark SQL DataFrame I/O on a local JSON file:
   * three equivalent ways to read JSON, writing the result back out as CSV,
   * and querying the DataFrame through a temporary view.
   *
   * Side effects: creates a local SparkSession, reads/writes files under the
   * hard-coded Windows paths below, prints result tables to stdout.
   */
  def testSource(): Unit = {
    val spark = SparkSession.builder().master("local[*]")
      .appName("sparkSession")
      .getOrCreate()
    // Ensure the session is stopped even if any read/write below fails.
    try {
      val filePath = "file:///D:\\workspace\\lab\\learnbigdata\\learnspark\\sparksql\\src\\main\\resources\\person.json"

      // Shorthand format-specific reader.
      spark.read.json(filePath).show()
      // Other built-in readers, for reference:
      //    spark.read.csv()
      //    spark.read.orc()
      //    spark.read.jdbc()
      //    spark.read.load()

      // Equivalent generic reader: explicit format + load.
      spark.read.format("json").load(filePath).show()
      // SQL can query a file directly using the format.`path` syntax.
      spark.sql("select * from json.`" + filePath + "`").show()

      val df = spark.read.json(filePath)
      // save() without format() would default to Parquet; here CSV is
      // requested explicitly, overwriting any previous output.
      val outFilePath = "file:///D:\\workspace\\lab\\learnbigdata\\learnspark\\sparksql\\src\\main\\resources\\output"
      df.write.format("csv").mode("overwrite").save(outFilePath)

      // Format-specific writers save directly — no separate save() call:
      //        df.write.json(outFilePath)
      //        df.write.mode("append").json(outFilePath)

      // Register a temp view so the DataFrame can be queried with SQL.
      df.createOrReplaceTempView("people")
      df.sqlContext.sql("SELECT name FROM people WHERE age BETWEEN 13 AND 19").show()
    } finally {
      spark.stop()
    }
  }

}
