package com.shengzai.rdd

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Demo2Partition {

  /** Word-count demo that prints the partition count of each RDD in the
    * lineage, illustrating that narrow transformations (flatMap, map)
    * preserve the parent's partitioning while reduceByKey produces a
    * shuffled RDD whose partition count comes from the default partitioner.
    *
    * Reads `data/words` (comma-separated words per line) and writes the
    * word counts to `data/wc`. The process then stays alive so the Spark
    * web UI can be inspected.
    */
  def main(args: Array[String]): Unit = {

    // Build a local Spark context; "local" runs everything in one JVM thread.
    val conf = new SparkConf()
    conf.setMaster("local")
    conf.setAppName("partition")
    val sc = new SparkContext(conf)

    // Read the input file; textFile's partition count follows the input splits.
    val linesRDD: RDD[String] = sc.textFile("data/words")

    println(s"linesRDD分区数：${linesRDD.getNumPartitions}")

    // Flatten each line into individual words (comma-separated).
    val wordsRDD: RDD[String] = linesRDD.flatMap(_.split(','))
    println(s"wordsRDD分区数：${wordsRDD.getNumPartitions}")

    // Pair each word with an initial count of 1.
    val kvRDD: RDD[(String, Int)] = wordsRDD.map((_, 1))
    println(s"kvRDD分区数：${kvRDD.getNumPartitions}")
    // Sum the counts per word (this is the shuffle boundary).
    val countRDD: RDD[(String, Int)] = kvRDD.reduceByKey(_ + _)
    println(s"countRDD分区数：${countRDD.getNumPartitions}")
    // Format each (word, count) pair as a tab-separated line.
    val resultRDD: RDD[String] = countRDD.map {
      case (word: String, count: Int) =>
        s"$word\t$count"
    }
    println(s"resultRDD分区数：${resultRDD.getNumPartitions}")

    // Save the result. NOTE(review): saveAsTextFile throws
    // FileAlreadyExistsException if data/wc already exists — delete the
    // directory between runs, or write to a fresh path.
    resultRDD.saveAsTextFile("data/wc")

    // Keep the driver JVM alive so the Spark UI (http://localhost:4040)
    // remains reachable. Sleeping instead of an empty `while (true) {}`
    // avoids pinning a CPU core at 100% in a busy-wait.
    while (true) {
      Thread.sleep(10000L)
    }
  }
}
