package cn.lzd.demo1

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by lzd on 2018/12/5.
  */
object WordCount {

  /**
    * Word-count driver: reads a text file, splits each line on single spaces,
    * counts occurrences of every word, sorts by descending count, and writes
    * the result as text files to the output directory.
    *
    * Paths may be supplied on the command line (defaults preserve the
    * original demo behavior):
    *   args(0) = input file   (default: G:\mr_profile\spark\wdcount.txt)
    *   args(1) = output dir   (default: G:\mr_profile\spark\out2)
    */
  def main(args: Array[String]): Unit = {
    // NOTE(review): the original input path had a stray leading "/"
    // ("/G:\\..."), inconsistent with the output path on the same drive;
    // normalized here — confirm against the actual Hadoop/Windows setup.
    val inputPath  = if (args.length > 0) args(0) else "G:\\mr_profile\\spark\\wdcount.txt"
    val outputPath = if (args.length > 1) args(1) else "G:\\mr_profile\\spark\\out2"

    // Create the SparkConf and set the application name. The "local" master
    // is kept for this demo; override via spark-submit --master in real use.
    val conf = new SparkConf().setAppName("scala-wdCount").setMaster("local")

    // SparkContext is the entry point for submitting a Spark application.
    val sc = new SparkContext(conf)

    try {
      // Transformation pipeline: lines -> words -> (word, 1) -> summed counts.
      val lines: RDD[String]             = sc.textFile(inputPath)
      val words: RDD[String]             = lines.flatMap(_.split(" "))
      val wordAndOne: RDD[(String, Int)] = words.map((_, 1))
      val counts: RDD[(String, Int)]     = wordAndOne.reduceByKey(_ + _)

      // Sort by count, descending; the named argument documents the flag.
      val sorted: RDD[(String, Int)] = counts.sortBy(_._2, ascending = false)

      // Action: triggers the job and materializes output part files.
      sorted.saveAsTextFile(outputPath)
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }
}
