package com.dongline.te

import javafx.application.Application
import javafx.stage.Stage
import org.apache.spark.{SparkConf, SparkContext}
/*
 * Demonstrates caching and checkpointing an RDD.
 * Note: if the RDD is cached before it is checkpointed, the checkpoint
 * job reads the cached data instead of recomputing the lineage.
 */
/**
 * JavaFX application that runs a local Spark word-count job when the
 * window starts, demonstrating RDD caching combined with checkpointing.
 */
class WordCount extends Application {

  /**
   * JavaFX entry point: counts word occurrences in D://wordcount.txt,
   * sorts them by frequency (descending) and prints the result.
   *
   * @param primaryStage the primary JavaFX stage (not used; output goes to stdout)
   */
  override def start(primaryStage: Stage): Unit = {
    val conf = new SparkConf()
      .setAppName("wordcount")
      .setMaster("local")
      // Cap the driver memory for local testing (~450 MB).
      .set("spark.testing.memory", "471859200")

    val sc = new SparkContext(conf)
    try {
      // Must be set before checkpoint() is called on any RDD.
      sc.setCheckpointDir("D:/check")

      val words = sc.textFile("D://wordcount.txt").flatMap(_.split(" "))

      // cache() before checkpoint(): the checkpoint job re-traverses the
      // lineage, and a cached RDD lets it read the cached data instead of
      // re-reading and re-splitting the source file.
      words.cache()
      words.checkpoint()

      // reduceByKey combines map-side before the shuffle, so only partial
      // sums cross the network — unlike groupBy(...).map(_._2.size), which
      // shuffles every single occurrence.
      val counts = words.map((_, 1)).reduceByKey(_ + _)
      val sorted = counts.sortBy(x => x._2, ascending = false)

      println(sorted.collect.toBuffer)
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }
}
