package com.shujia.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Classic Spark word count over a local text file.
 *
 * Reads comma-separated words from `data/words.txt`, counts occurrences
 * per word with `reduceByKey`, and prints each (word, count) pair.
 */
object Demo12Wc {
  def main(args: Array[String]): Unit = {
    // App name now matches this demo (was "group", copied from an
    // earlier groupBy example); "local" runs Spark in-process.
    val conf = new SparkConf()
      .setAppName("wordCount")
      .setMaster("local")

    val sc = new SparkContext(conf)

    try {
      // Read the input file; each RDD element is one line of text.
      val linesRDD: RDD[String] = sc.textFile("data/words.txt")

      // Split each comma-separated line into individual words.
      val wordsRDD: RDD[String] = linesRDD.flatMap(_.split(","))

      // Pair every word with an initial count of 1 (kv format).
      val kvRDD: RDD[(String, Int)] = wordsRDD.map(word => (word, 1))

      // Sum the 1s per key to get the count for each word.
      val countRDD: RDD[(String, Int)] = kvRDD.reduceByKey(_ + _)

      // NOTE: foreach(println) prints on the executor; fine for local mode.
      countRDD.foreach(println)
    } finally {
      // Always release the SparkContext, even if the job fails —
      // the original never stopped it.
      sc.stop()
    }
  }
}
