package base_spark

import org.apache.spark.{SparkConf, SparkContext}

object WorldCount {

    /** Word-count driver: reads a text file, counts word occurrences, and
      * prints every word that appears more than 100 times, most frequent
      * first, as (count, word) pairs.
      *
      * NOTE(review): "WorldCount" is presumably a typo for "WordCount", but
      * the name is kept because external callers (e.g. a `spark-submit
      * --class` argument) may reference it.
      *
      * @param args optional; args(0), when present, overrides the default
      *             hard-coded input file path
      */
    def main(args: Array[String]): Unit = {
        val conf: SparkConf = new SparkConf()
        conf.setAppName("localWorldCount")
            .setMaster("local[*]") // local mode, one worker thread per core
        val sc: SparkContext = new SparkContext(conf)

        try {
            // Allow the input path to be supplied on the command line;
            // fall back to the original hard-coded Windows path.
            val fileName: String = args.headOption.getOrElse(
                "file:///D:\\rendertron\\node_modules\\_node-forge@0.7.6@node-forge\\README.md")

            sc.textFile(fileName)
                .flatMap(_.split(" "))        // tokenize on single spaces
                .map((_, 1))                  // (word, 1)
                .reduceByKey(_ + _)           // (word, totalCount)
                .map { case (word, count) => (count, word) } // swap so the count becomes the sort key
                .filter(_._1 > 100)           // keep only words seen more than 100 times
                .sortByKey(ascending = false) // highest count first
                .collect()                    // small result set after the filter, safe to pull to the driver
                .foreach(println)
        } finally {
            // Always release the SparkContext, even if the job throws,
            // so local executor threads and the UI port are freed.
            sc.stop()
        }
    }

}
