package com.simon.spark

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
object WorldCount {
  /**
   * Word-count driver: reads text from `args(0)`, splits lines on single
   * spaces, counts occurrences of each token, and writes the (word, count)
   * pairs sorted by descending count to `args(1)` as a text file.
   *
   * @param args args(0) = input path, args(1) = output path
   */
  def main(args: Array[String]): Unit = {
    // Fail fast with a usage message rather than an
    // ArrayIndexOutOfBoundsException deep inside the job.
    require(args.length >= 2, "usage: WorldCount <inputPath> <outputPath>")

    // NOTE(review): setMaster("local[2]") hard-codes local execution; for a
    // real cluster the master is normally supplied via spark-submit — confirm
    // this is intended before deploying.
    val conf = new SparkConf().setAppName("mySpark").setMaster("local[2]")
    val sc = new SparkContext(conf)
    try {
      val file: RDD[String] = sc.textFile(args(0))
      val resultSort: RDD[(String, Int)] =
        file
          .flatMap(_.split(" "))           // tokenize on single spaces (empty tokens possible on repeated spaces)
          .map(word => (word, 1))
          .reduceByKey(_ + _)
          .sortBy(_._2, ascending = false) // most frequent words first
      resultSort.saveAsTextFile(args(1))
    } finally {
      // Always release the SparkContext, even if the job throws.
      sc.stop()
    }
  }
}
