package cn.darksoul3.spark

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Spark driver that counts word occurrences in a text file and writes the
 * counts, sorted by frequency (descending), to an output directory.
 *
 * Usage: WordCount &lt;inputPath&gt; &lt;outputPath&gt;
 */
object WordCount {

  def main(args: Array[String]): Unit = {
    // Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException
    // when the caller forgets an argument.
    if (args.length < 2) {
      System.err.println("Usage: WordCount <inputPath> <outputPath>")
      sys.exit(1)
    }

    val sparkConf = new SparkConf()
      .setAppName("word count in spark")
      .setMaster("local[*]") // local mode, one worker thread per core

    val sparkContext = new SparkContext(sparkConf)

    // Ensure the context is stopped even if any stage of the job throws.
    try {
      val lines: RDD[String] = sparkContext.textFile(args(0))

      // Split on runs of whitespace. A line with leading whitespace produces a
      // leading "" token from split, so drop empty strings to avoid counting
      // a bogus empty-word key.
      val words: RDD[String] = lines
        .flatMap(_.split("\\s+"))
        .filter(_.nonEmpty)

      // Pair each word with 1, then sum the counts per word.
      val mapping: RDD[(String, Int)] = words.map((_, 1))

      val reduced: RDD[(String, Int)] = mapping.reduceByKey(_ + _)

      // Most frequent words first.
      val sorted: RDD[(String, Int)] = reduced.sortBy(_._2, ascending = false)

      sorted.saveAsTextFile(args(1))
    } finally {
      sparkContext.stop()
    }
  }

}
