package com.shujia.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Demo13Submit {
  /**
   * Word-count demo intended for cluster submission (spark-submit).
   *
   * The interleaved println("driver") / println("executor") calls demonstrate
   * where code runs: statements in `main` execute on the driver, while the
   * bodies of the lambdas passed to `map`/`reduceByKey` execute on the
   * executors (and only when an action triggers the job).
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    // 1. Build the Spark execution environment.
    val conf = new SparkConf()
    // Master is intentionally NOT set here: it is supplied by spark-submit
    // when the job is deployed to a cluster.
    //conf.setMaster("local")
    conf.setAppName("wc")
    val sc = new SparkContext(conf)

    try {
      println("driver")

      // 2. Read the input data.
      // RDD: resilient distributed dataset (conceptually similar to a List,
      // but partitioned across the cluster and evaluated lazily).
      val linesRDD: RDD[String] = sc.textFile("/data/lines.txt")
      println("driver")

      // Split each line into words (one line expands to many rows).
      val wordsRDD: RDD[String] = linesRDD.flatMap(_.split(","))

      println("driver")

      // Pair each word with an initial count of 1.
      // NOTE: this lambda runs on the executors, not the driver.
      val kvRD: RDD[(String, Int)] = wordsRDD.map(word => {
        println("executor")
        (word, 1)
      })

      println("driver")

      // Sum the counts per word. The reduce function also runs on executors.
      val countRDD: RDD[(String, Int)] = kvRD.reduceByKey((x, y) => {
        println("executor")
        x + y
      })

      println("driver")

      // Save the result. saveAsTextFile is the action that actually
      // triggers execution of the whole lineage above.
      // NOTE(review): fails if /data/word_count already exists — delete it
      // first or write to a fresh path.
      countRDD.saveAsTextFile("/data/word_count")
    } finally {
      // Release cluster resources even if the job fails; the original code
      // never stopped the context.
      sc.stop()
    }
  }
}
