package com.shujia.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 *  standalone
 *    - client
 *    - cluster
 *
 *  yarn
 *    - client
 *    - cluster
 */
object WordCount3 {
  /**
   * Entry point for a word-count job intended to be submitted to a cluster
   * (standalone or YARN, client or cluster deploy mode) via spark-submit.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setAppName("测试服务器提交单词统计")
    // The master URL is supplied by spark-submit when running on the server,
    // so setMaster is intentionally NOT called here.

    val sc = new SparkContext(conf)

    try {
      val lineRDD: RDD[String] = sc.parallelize(List("hello,world,hadoop", "hello,spark,hadoop", "flink,hive,hadoop", "hbase,world,hive"))

      // Split each line on commas, pair every word with 1, then sum per word.
      val resRDD: RDD[(String, Int)] = lineRDD.flatMap(_.split(","))
        .map((_, 1))
        .reduceByKey(_ + _)

      // NOTE: foreach is an action executed on the executors; in cluster mode
      // the output appears in executor logs, not on the driver console.
      resRDD.foreach(println)
    } finally {
      // Always stop the context so cluster resources (executors) are released
      // even if the job throws.
      sc.stop()
    }
  }
}
