package com.hzh.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Demo18SparkSubmit {
  /**
   * Demo: a word-count job meant to be packaged as a jar and submitted to a cluster.
   *
   * Build the connection. When submitting to a cluster, `setMaster` must stay
   * commented out so the master is taken from the `spark-submit --master` flag.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setAppName("Demo18SparkSubmit")
    //    conf.setMaster("local")
    val sc = new SparkContext(conf)
    try {
      val valuesRDD: RDD[String] = sc.parallelize(List("java,java,hadoop", "spark,scala,java", "hadoop,hadoop,scala"))
      valuesRDD.flatMap(_.split(","))
        .map {
          word: String =>
            (word, 1)
        }
        .reduceByKey(_ + _)
        // NOTE: `foreach` is an action executed on the executors, so in cluster
        // mode the output appears in the executor logs, not the driver console.
        .foreach(println)
    } finally {
      // Release the application's cluster resources even if the job fails.
      sc.stop()
    }

    /**
     * Package the local code into a jar and submit it to the cluster:
     * spark-submit --class com.hzh.spark.core.Demo18SparkSubmit --master spark://master:7077 ./spark-test-jar/spark-1.0.jar
     */
  }
}
