package cn.echcz.spark.examples

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * 单词计数
  */
/**
  * Word count: reads a text file, counts occurrences of each
  * whitespace-separated word, and writes the (word, count) pairs out
  * as a text file.
  *
  * Usage: WordCount &lt;inputPath&gt; &lt;outputPath&gt;
  */
object WordCount {
  def main(args: Array[String]): Unit = {
    // Fail fast with a clear message instead of an ArrayIndexOutOfBoundsException.
    require(args.length >= 2, "Usage: WordCount <inputPath> <outputPath>")
    // First argument: path of the input text file(s).
    val inputPath = args(0)
    // Second argument: directory where the result is saved.
    val outputPath = args(1)

    val conf: SparkConf = new SparkConf().setAppName("WordCount")
    val sc = new SparkContext(conf)
    try {
      val input: RDD[String] = sc.textFile(inputPath)
      // Line => words. Split on runs of whitespace (`\s+`, not `\s`) so that
      // consecutive spaces/tabs don't yield empty tokens; a leading separator
      // can still produce one empty string, so filter those out too.
      val words: RDD[String] = input.flatMap(_.split("""\s+""")).filter(_.nonEmpty)
      // Word => (word, 1)
      val wordPair: RDD[(String, Int)] = words.map((_, 1))
      // (word, 1) => (word, count)
      val counts: RDD[(String, Int)] = wordPair.reduceByKey(_ + _)
      // Save the result as text files in the output directory.
      counts.saveAsTextFile(outputPath)
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }
}
