package com.shujia.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Word-count example meant to be packaged and submitted to a Spark
 * standalone cluster (as opposed to running with a local master).
 */
object Demo18SparkStandSubmit {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()

    conf.setAppName("submit")

    /**
     * When submitting to the cluster, the master must NOT be hard-coded
     * here — it is supplied via --master on spark-submit. Keep the line
     * below commented out for cluster runs; uncomment only for local testing.
     */
    // conf.setMaster("local")

    val sc = new SparkContext(conf)

    try {
      // Small in-memory dataset standing in for real input lines.
      val linesRDD: RDD[String] = sc.parallelize(List("java,spark,hadoop", "spark,hadoop,hadoop", "hadoop,java"))

      // Split each line into words on ','.
      val wordsRDD: RDD[String] = linesRDD.flatMap(_.split(","))

      // Pair each word with an initial count of 1.
      val kvRDD: RDD[(String, Int)] = wordsRDD.map((_, 1))

      // Sum the counts per word.
      val countRDD: RDD[(String, Int)] = kvRDD.reduceByKey(_ + _)

      // NOTE: on a cluster this prints on the executors' stdout, not the
      // driver console; use collect() first if driver-side output is needed.
      countRDD.foreach(println)
    } finally {
      // Release cluster resources even if the job fails.
      sc.stop()
    }

    /**
     * Package the project and upload the jar to the server, then submit:
     *
     * spark-submit --class com.shujia.spark.core.Demo18SparkStandSubmit --master spark://master:7077 spark-1.0.jar
     */
  }

}
