package com.jscloud.spark.scalacount

import com.alibaba.fastjson.{JSON, JSONArray, JSONObject}
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
//import scala.util.parsing.json.JSON

object ExternalSource {

  /**
   * Demo entry point: reads a text file into an RDD, prints each raw line,
   * then splits every line on commas and prints the individual tokens.
   *
   * @param args optional; args(0) may override the input file path
   *             (defaults to the original bundled resource path).
   */
  def main(args: Array[String]): Unit = {
    // Program entry: SparkContext in local mode with 2 worker threads (demo only).
    val sparkConf: SparkConf = new SparkConf().setAppName("testRdd").setMaster("local[2]")
    val sc: SparkContext = new SparkContext(sparkConf)

    try {
      // Input path can be overridden via the first CLI argument; falls back to
      // the original hard-coded Windows path for backward compatibility.
      val inputPath: String =
        if (args.nonEmpty) args(0)
        else "file:///D:\\JSProjects\\jsCloud-bigdata-app\\sparkapp\\src\\main\\resources\\word.json"

      val rdd1: RDD[String] = sc.textFile(inputPath)
      // Print every raw line of the input file.
      rdd1.foreach(a => println("===rdd1========>" + a))

      // Split each line on commas and print the resulting tokens.
      val rdd4 = rdd1.flatMap(_.split(","))
      rdd4.foreach(println(_))
    } finally {
      // Always stop the SparkContext so local executor resources are released,
      // even if a job above fails.
      sc.stop()
    }
  }

}
