package com.atguigu.bigdata.spark

import org.apache.spark.{SparkConf, SparkContext}
import scala.util.parsing.json.JSON
// Read a JSON file and parse each line with scala.util.parsing.json.JSON
/**
 * Minimal Spark driver: reads `in/name.json` as plain text and parses each
 * line independently as JSON, printing the parsed result of every line.
 *
 * Note: `JSON.parseFull` returns `Option[Any]` — `None` for any line that is
 * not a complete, valid JSON value on its own.
 */
object Spark02_json13 {

  def main(args: Array[String]): Unit = {
    // Spark runtime configuration: run locally using all available cores.
    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("wordCount")

    // The SparkContext is the entry point for building RDDs.
    val sc = new SparkContext(conf)

    // Each RDD element is one raw line of the input file.
    val lines = sc.textFile("in/name.json")

    // Parse every line to Option[Any]; malformed lines become None
    // rather than failing the job.
    val parsed = lines.map(line => JSON.parseFull(line))

    // Side effect only: print each parsed value on the executors/driver.
    parsed.foreach(println)

    sc.stop()
  }
}
