package com.shujia.opt

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Word-count demo contrasting `reduceByKey` with the (commented-out)
 * `groupByKey` approach, plus an optional key-filtering step.
 *
 * Reads comma-separated words from `spark/data/word`, counts occurrences
 * of each word, and prints the (word, count) pairs on the driver.
 */
object Demo4FilterKey {
  def main(args: Array[String]): Unit = {

    // 1. Build the Spark environment.
    // Configuration object for the application.
    val conf = new SparkConf()
    // Execution mode; "local" runs everything in-process on this machine.
    conf.setMaster("local")
    // Spark application name (shown in the UI / logs).
    conf.setAppName("wc")
    // SparkContext: the entry point for creating and running RDDs.
    val sc = new SparkContext(conf)

    // Ensure the context is stopped even if the job throws, so the
    // driver releases its resources and shuts down cleanly.
    try {
      val linesRDD: RDD[String] = sc.textFile("spark/data/word")

      // Split each line into individual words on commas.
      val wordsRDD: RDD[String] = linesRDD.flatMap(line => line.split(","))

      // Optional cleanup step: drop literal "null" tokens before counting.
      // val filterRDD: RDD[String] = wordsRDD.filter(word => !"null".equals(word))

      // Pair every word with an initial count of 1.
      val kvRDD: RDD[(String, Int)] = wordsRDD.map(word => (word, 1))

      // Alternative using groupByKey (kept for comparison): it shuffles
      // every value across the network with no map-side combining, so it
      // moves far more data than reduceByKey for the same result.
      // val groupByKeyRDD: RDD[(String, Iterable[Int])] = kvRDD.groupByKey(6)
      // val countRDD: RDD[(String, Int)] = groupByKeyRDD.mapValues(iter => iter.sum)
      // countRDD.foreach(println)

      // reduceByKey combines values on the map side before the shuffle,
      // so only partial sums cross the network.
      val countRDD: RDD[(String, Int)] = kvRDD.reduceByKey((x, y) => x + y)

      // Action: triggers the job and prints each (word, count) pair.
      countRDD.foreach(println)
    } finally {
      // Release the SparkContext so the driver terminates cleanly.
      sc.stop()
    }
  }
}
