package com.example.bigdata.spark.SparkTest

import org.apache.log4j.{Level, Logger}
import org.apache.spark.{SparkConf, SparkContext}

import scala.util.parsing.json.JSON

object ChapterDemo {

  /**
   * Entry point: builds a local single-threaded Spark context, silences Spark's
   * log output, runs the JSON-reading demo, then stops the context.
   */
  def main(args: Array[String]): Unit = {

    // getSimpleName on a Scala object's runtime class yields "ChapterDemo$";
    // strip the trailing '$' so the app name reads cleanly in the Spark UI.
    val conf = new SparkConf()
      .setMaster("local[1]")
      .setAppName(ChapterDemo.getClass.getSimpleName.stripSuffix("$"))
    Logger.getLogger("org.apache.spark").setLevel(Level.OFF)
    val sc = new SparkContext(conf)
    //读取普通文件(sc)
    读取json数据(sc)

    sc.stop()
  }

  /**
   * Reads a plain text file, prints the whole content joined by ':',
   * then prints every whitespace-separated token line by line.
   */
  def 读取普通文件(sc: SparkContext): Unit = {
    val inputTextFile = sc.textFile("logs/capacity.log")
    println(inputTextFile.collect.mkString(":"))
    // Collect to the driver before printing: a bare RDD.foreach(println) runs
    // on executors, so its output is lost when not running in local mode.
    inputTextFile.collect.foreach { line =>
      // split each line on spaces and print every token
      line.split(" ").foreach(token => println("y = " + token))
    }
  }

  /**
   * Reads a JSON-lines file, parses each line with JSON.parseFull
   * (Option[Any] per line), and prints the parse result of every line.
   */
  def 读取json数据(sc: SparkContext): Unit = {
    val inputJsonFile = sc.textFile("input/info.json")
    val content = inputJsonFile.map(JSON.parseFull)
    println("----" + content.collect.mkString("\t"))
    content.collect.foreach(x => println("x : " + x))
    // Bug fix: the original commented out the Some(...) case, so every
    // successfully parsed line fell through to the catch-all "other" branch.
    // With Some/None both handled the Option match is exhaustive, so the
    // unreachable catch-all is removed. Collect first so printing happens
    // on the driver rather than on executors.
    content.collect.foreach {
      case Some(parsed) => println(parsed) // parsed JSON value (typically a Map)
      case None         => println("无效的Json")
    }
  }

}
