package SparkRDD.RDD的缓存

import org.apache.commons.lang.StringUtils
import org.apache.spark.{SparkConf, SparkContext}
import org.junit.Test

class cacheTest {

  /**
   * Demonstrates RDD caching: after `.cache()`, the two actions below reuse
   * the aggregated result instead of re-running the whole lineage
   * (textFile -> map -> filter -> reduceByKey) for each action.
   */
  @Test
  def test(): Unit = {

    val conf = new SparkConf().setAppName("ip统计").setMaster("local[6]")
    val sc   = new SparkContext(conf)

    try {
      val resource = sc.textFile("src/main/scala/RDD的缓存/ip.txt")
      // Map each line to an (ip, 1) pair; the IP is the first comma-separated field.
      val ipRDD    = resource.map(item => (item.split(",")(0), 1))
      // Drop records whose IP field is empty.
      val cleanRDD = ipRDD.filter(item => StringUtils.isNotEmpty(item._1))

      // Cache the Transformation result so both actions below hit the cache.
      val aggRDD = cleanRDD.reduceByKey((curr, agg) => curr + agg).cache()

      // BUG FIX: the original labels were swapped — with ascending = true,
      // first() returns the SMALLEST count. Descending order yields the max.
      val maxEntry = aggRDD.sortBy(item => item._2, ascending = false).first() // Action 1
      val minEntry = aggRDD.sortBy(item => item._2, ascending = true).first()  // Action 2

      // Original code passed two arguments to println, triggering deprecated
      // argument auto-tupling; use a single interpolated string instead.
      println(s"max:$maxEntry, min:$minEntry")
    } finally {
      sc.stop() // always release the local SparkContext, even on failure
    }

    /* Timing observed WITH cache:
     *   ResultStage 9 (first at cacheTest.scala:25) finished in 0.006 s
     *   Job 3 finished: first at cacheTest.scala:25, took 0.032388 s
     *
     * Timing observed WITHOUT cache:
     *   ResultStage 9 (first at test.scala:31) finished in 0.006 s
     *   Job 3 finished: first at test.scala:31, took 0.048808 s
     */
  }
}
