package com.shujia.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demo: running a Spark word-count job on a standalone cluster.
 *
 * The master URL is intentionally NOT set here — it is supplied by
 * `spark-submit --master ...` at submission time (see the commands below),
 * so the same jar works in both client and cluster deploy modes.
 */
object Demo18Standalone {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf()
      .setAppName("standalone集群运行")
    val sc: SparkContext = new SparkContext(conf)

    // Word count: each input string is a '|'-delimited list of words.
    val rdd1: RDD[String] = sc.parallelize(List("hive|java|hello|world", "hive|java|hadoop|world", "hive|spark|hello|hadoop"))
    rdd1.flatMap(_.split("\\|"))   // split on literal '|' (escaped — '|' is a regex alternation)
      .map((_, 1))                 // pair each word with an initial count of 1
      .reduceByKey(_ + _)          // sum counts per word
      .foreach(println)            // NOTE: foreach runs on executors — in cluster mode this
                                   // output appears in executor logs, not the driver console

    // Release cluster resources explicitly instead of relying on JVM shutdown.
    sc.stop()

    /**
     * standalone
     *  - client-mode submit command:
     *    spark-submit --class com.shujia.core.Demo18Standalone --master spark://master:7077 --executor-memory 512m --total-executor-cores 1 spark-1.0.jar 10
     *
     *  - cluster-mode submit command:
     *    spark-submit --class com.shujia.core.Demo18Standalone --master spark://master:7077 --executor-memory 512M --total-executor-cores 1 --deploy-mode cluster spark-1.0.jar 10
     *
     *
     */
  }
}
