package cn.hnu.spark


import org.apache.commons.lang3.StringUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable

/**
 * Classic word-count demo over a local text file.
 *
 * Reads `data/words.txt`, drops blank lines, splits each line on whitespace,
 * counts occurrences per word, prints the result and writes it to `data/out1`
 * as a single output file.
 */
object RddDemo02 {
  def main(args: Array[String]): Unit = {
    // Build the Spark context: local mode with 2 threads, suitable for this demo.
    val conf: SparkConf = new SparkConf().setAppName("Rdd-demo").setMaster("local[2]")
    val sc = new SparkContext(conf)
    sc.setLogLevel("WARN")

    try {
      // Each element is one line of the input file.
      val lines: RDD[String] = sc.textFile("data/words.txt")
      // Keep only non-blank lines. Fix: the original called the varargs
      // StringUtils.isNoneBlank(_); isNotBlank is the single-argument method
      // intended here (they coincide for one argument, but isNotBlank states
      // the intent and avoids the varargs wrapping).
      val newLines: RDD[String] = lines.filter(StringUtils.isNotBlank(_))
      // \s+ matches one or more whitespace characters (spaces, tabs, ...).
      val rdd1: RDD[String] = newLines.flatMap(_.split("\\s+"))
      // Word count: pair each word with 1, then sum the counts per key.
      val result: RDD[(String, Int)] = rdd1.map((_, 1)).reduceByKey(_ + _)

      // Sink. NOTE: foreach(println) runs on the executors; in local mode the
      // output still appears on this console.
      result.foreach(println)
      // repartition(1) forces a single output part-file. NOTE(review): this
      // fails if data/out1 already exists — Spark refuses to overwrite.
      result.repartition(1).saveAsTextFile("data/out1")
      // coalesce(1) would achieve the same without a full shuffle:
      //    result.coalesce(1).saveAsTextFile("data/out1")
    } finally {
      // Always release the SparkContext, even if the job fails (the original
      // never stopped it, leaking the context and its UI/threads).
      sc.stop()
    }
  }

}
