package spark.example

import org.apache.spark.SparkContext._
import org.apache.spark.{SparkConf,SparkContext}

/**
 * Simple Spark word-count job.
 *
 * Usage: wordcount &lt;input-path&gt; &lt;output-path&gt;
 *
 * Reads text from args(0), counts occurrences of space-separated tokens,
 * and writes (word, count) pairs sorted by descending count to args(1).
 */
object wordcount {
  def main(args: Array[String]): Unit = {
    // Fail fast with a usage message instead of an opaque
    // ArrayIndexOutOfBoundsException when either path is missing.
    if (args.length < 2) {
      System.err.println("Usage: wordcount <input-path> <output-path>")
      sys.exit(1)
    }
    // Master and deploy mode are intentionally not hard-coded here; supply
    // them via spark-submit (e.g. --master yarn --deploy-mode cluster).
    val conf = new SparkConf().setAppName("wordcount1")
    val sc = new SparkContext(conf)
    try {
      // Tokenize on single spaces. NOTE(review): consecutive spaces/tabs
      // produce empty or unsplit tokens — kept as-is to preserve behavior.
      val counts = sc.textFile(args(0))
        .flatMap(_.split(" "))
        .map(word => (word, 1))
        .reduceByKey(_ + _)
      // Sort by count descending: swap to (count, word) so the count is the
      // key, sortByKey(false), then swap back to (word, count) for output.
      val sorted = counts
        .map { case (word, n) => (n, word) }
        .sortByKey(ascending = false)
        .map { case (n, word) => (word, n) }
      sorted.saveAsTextFile(args(1))
    } finally {
      // Always release the SparkContext, even if the job throws; the
      // original only stopped it on the success path.
      sc.stop()
    }
  }
}
