package RDD

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import org.junit.Test

class Action算子 {
  // Shared Spark configuration for every test in this class.
  val conf = new SparkConf()
    .setMaster("local[6]")
    .setAppName("Create_RDD")
  // Use getOrCreate instead of `new SparkContext(conf)`: JUnit 4 creates a
  // fresh instance of this class for each @Test method, and constructing a
  // second SparkContext in the same JVM fails in local mode with
  // "Only one SparkContext may be running in this JVM".
  val sc = SparkContext.getOrCreate(conf)

  /** reduce action: folds all (name, price) pairs down to a single tuple on
    * the driver, summing the prices; the key of the result is replaced by
    * the fixed label "总价:" (total price). */
  @Test
  def Reduce(): Unit = {
    val result: (String, Int) = sc.parallelize(Seq(("手机", 10), ("电脑", 15), ("手机", 20)))
      .reduce((curr, agg) => ("总价:", curr._2 + agg._2))
    println(result)
  }

  /** count vs countByKey: count() returns the total number of elements in
    * the RDD; countByKey() returns a driver-side Map of key -> number of
    * occurrences of that key. */
  @Test
  def count_and_countByKey(): Unit = {
    val source: RDD[(String, Int)] = sc.parallelize(Seq(("a", 1), ("a", 2), ("b", 1), ("b", 2), ("b", 5)))
    println("元素的个数:" + source.count())
    println(source.countByKey())
  }
}
