package cn.jly.bigdata.spark.sql

import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}

/**
 * @author lanyangji
 * @date 2019/12/1 21:55
 */
object SparkSql07_JSON {

  /**
   * Demonstrates two ways of loading JSON data into a DataFrame:
   *   1. reading a JSON file from disk via the generic `format("json").load(...)` reader;
   *   2. parsing an in-memory `Dataset[String]` of JSON records with `spark.read.json`.
   */
  def main(args: Array[String]): Unit = {

    val spark: SparkSession = SparkSession
      .builder()
      .config(new SparkConf())
      .master("local[*]")
      .appName("SparkSql07_JSON")
      .getOrCreate()

    // Needed for the implicit Encoder[String] used by createDataset below.
    import spark.implicits._

    // Way 1: load a JSON file from the local filesystem.
    val df: DataFrame = spark.read.format("json").load("input/people.json")
    df.show()

    // Way 2: build a Dataset[String] where each element is one JSON record,
    // then parse it directly (spark.read.json(Dataset[String]) — Spark 2.2+).
    val otherPeopleDataset: Dataset[String] = spark.createDataset(
      """{"name":"tom","address":{"city":"beijing","street":"wangfujing"}}""" :: Nil
    )

    // Fix: this parse step was left commented out, leaving otherPeopleDataset as
    // dead code; it is now completed with a concrete DataFrame type instead of Any.
    val otherPeople: DataFrame = spark.read.json(otherPeopleDataset)
    otherPeople.show()

    spark.close()
  }
}
