package spark.freedomstart

import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Demonstrates launching a Spark application on YARN directly with the `java`
  * command, without going through the `spark-submit` script or the
  * `SparkSubmit` class. Bypassing spark-submit makes it easier to embed a
  * Spark job inside a larger host application.
  *
  * The configuration normally injected by spark-submit is supplied here via
  * JVM system properties before the [[org.apache.spark.SparkContext]] is
  * created.
  */
object StartSparkWithoutSparkSubmit {

  // These run during object initialization, i.e. before main() executes,
  // so SparkConf picks them up when the context is constructed.
  // SPARK_SUBMIT=true makes Spark believe it was launched via spark-submit,
  // which enables the client-deploy code paths.
  System.setProperty("SPARK_SUBMIT", "true")
  // The application jar must be shipped to the YARN containers explicitly,
  // since spark-submit is not doing it for us. NOTE(review): hard-coded
  // local path — adjust for your environment.
  System.setProperty("spark.jars", "file:/root/IdeaProjects/java-scala-practice/target/java-scala-practice-1.0.jar")
  System.setProperty("spark.submit.deployMode", "client")
  // "yarn-client" is the Spark 1.x master syntax; newer Spark versions use
  // master "yarn" together with spark.submit.deployMode=client.
  System.setProperty("spark.master", "yarn-client")

  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setAppName("local-yarn-test")
    val sc = new SparkContext(sparkConf)
    try {
      val hc = new HiveContext(sc)
      // Simple smoke query against the Hive metastore to prove the
      // YARN-backed context works end to end.
      hc.sql("select * from pokes").show()
    } finally {
      // Always release the YARN application / executors, even if the
      // query fails — the original version leaked the running context.
      sc.stop()
    }
  }

}
