import org.apache.commons.lang3.StringUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import org.junit.Test

/**
 * ClassName: CacheOp<br/>
 * Description: demonstrates caching an RDD that is reused by multiple actions.<br/>
 * date: 2020/8/4 17:54<br/>
 *
 * @author Hesion
 * @version 1.0
 * @since JDK 1.8
 */
class CacheOp {

  /**
   * Counts access-log hits per IP and prints the least- and most-frequent IP.
   *
   * The aggregated RDD is consumed by two separate actions, so it is cached
   * once to avoid recomputing the whole lineage
   * (textFile -> map -> filter -> reduceByKey) for the second action.
   */
  @Test
  def prepare(): Unit = {
    // Create the SparkContext (local mode, 6 threads).
    val conf: SparkConf = new SparkConf().setMaster("local[6]").setAppName("cache_prepare")
    val sc = new SparkContext(conf)

    // Read the access log; the IP is the first whitespace-separated field of each line.
    val sourceRDD: RDD[String] = sc.textFile("dataset/access_log_sample.txt")
    val countRDD: RDD[(String, Int)] = sourceRDD.map(line => (line.split(" ")(0), 1))
    // Drop lines whose IP field came out empty.
    val cleanRDD: RDD[(String, Int)] = countRDD.filter(pair => StringUtils.isNotEmpty(pair._1))
    val aggRdd: RDD[(String, Int)] = cleanRDD.reduceByKey(_ + _)

    // Two actions below reuse aggRdd: without cache() the full lineage would be
    // re-executed for each of them.
    aggRdd.cache()

    // Least-frequent IP first, then most-frequent.
    val lessIp = aggRdd.sortBy(_._2, ascending = true).first()
    val moreIp = aggRdd.sortBy(_._2, ascending = false).first()
    println(lessIp)
    println(moreIp)

    // Release the cached partitions before shutting down.
    aggRdd.unpersist()
    sc.stop()
  }
}
