package com.shengzai.rdd

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Classic Spark RDD word-count demo.
 *
 * Reads a whitespace-separated text file, counts occurrences of each word,
 * prints the results to stdout and writes them as tab-separated lines to an
 * output directory.
 *
 * NOTE(review): input is read from `hadoop_code/src/...` but output goes to
 * `spark_code/data/...` — confirm both paths are intentional. Also note that
 * `saveAsTextFile` fails if the output directory already exists.
 */
object Demo1WorldCount {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setMaster("local")
    conf.setAppName("WorldCount")
    val sc = new SparkContext(conf)

    try {
      // One element per line of the input file.
      val textRDD: RDD[String] = sc.textFile("hadoop_code/src/data/worlds.txt")

      // Split each line on single spaces, yielding one element per word.
      val wordsRDD: RDD[String] = textRDD.flatMap(_.split(" "))

      // Pair each word with an initial count of 1.
      val pairsRDD: RDD[(String, Int)] = wordsRDD.map((_, 1))

      // Sum the counts per word (combines map-side before shuffling).
      val countsRDD: RDD[(String, Int)] = pairsRDD.reduceByKey(_ + _)

      // Format as "word<TAB>count" lines for output.
      val resultRDD: RDD[String] = countsRDD.map {
        case (word, count) => s"$word\t$count"
      }

      resultRDD.foreach(println)
      resultRDD.saveAsTextFile("spark_code/data/WorldCount")
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }
}
