package com.imooc.spark

import org.apache.log4j.{Level, Logger}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

/**
  * Created by zghgchao 2017/12/22 10:09
  * Demonstrates basic usage of SQLContext
  */
object SQLContextApp {
  def main(args: Array[String]): Unit = {
    setLogger()
    if (args.length != 1) {
      println("Usage: SQLContextApp <path-to-json>")
      System.exit(1)
    }
    val path = args(0)

    // 1) Create the contexts
    val sparkConf = new SparkConf()
    // Master and app name are supplied via spark-submit; uncomment for local runs:
    // .setMaster("local[2]").setAppName("SQLContextApp")
    val sc = new SparkContext(sparkConf)
    // SQLContext was the Spark SQL entry point in Spark 1.x
    val sqlContext = new SQLContext(sc)
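    // In Spark 2.x the preferred entry point is SparkSession; a rough sketch of
    // the equivalent setup (would need the org.apache.spark.sql.SparkSession import):
    //   val spark = SparkSession.builder().config(sparkConf).getOrCreate()
    //   val people = spark.read.json(path)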

    // 2) Business logic
    // Equivalent generic form: sqlContext.read.format("json").load(path)
    val people = sqlContext.read.json(path)
    people.printSchema()
    people.show()
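
    // A quick SQL sketch against the DataFrame, assuming the input matches
    // Spark's bundled people.json ("name" and "age" columns). registerTempTable
    // is the Spark 1.x API; Spark 2.x renames it createOrReplaceTempView:
    // people.registerTempTable("people")
    // sqlContext.sql("SELECT name, age FROM people WHERE age > 20").show()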

    // 3) Release resources
    sc.stop()
  }

  def setLogger(): Unit = {
    // Silence framework and application logging to keep console output readable
    Logger.getLogger("org").setLevel(Level.OFF)
    Logger.getLogger("com").setLevel(Level.OFF)
    System.setProperty("spark.ui.showConsoleProgress", "false")
    Logger.getRootLogger.setLevel(Level.OFF)
  }

  /**
    * Package with Maven: mvn clean package -DskipTests
    * Submit the Spark application to the environment:
        spark-submit \
        --name SQLContextApp \
        --class com.imooc.spark.SQLContextApp \
        --master local[2] \
        /home/hadoop/lib/sql-1.0.jar \
        /home/hadoop/app/spark-2.1.0-bin-2.6.0-cdh5.7.0/examples/src/main/resources/people.json
     */
}
