package org.example.operator.transform

import org.apache.spark.rdd.RDD
import org.apache.spark.{HashPartitioner, SparkConf, SparkContext}

/**
 * Demonstrates the `reduceByKey` transformation on a pair RDD: values
 * sharing the same key are combined with a binary function (here, sum).
 */
object Spark14_RDD_Operator_ReduceByKey {
  /**
   * Entry point: builds a small pair RDD, sums values per key with
   * `reduceByKey`, and prints the result.
   *
   * Expected output (order of keys may vary): (a,6), (b,4).
   */
  def main(args: Array[String]): Unit = {

    // Local-mode context using all available cores; fine for a demo.
    val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("RDD")
    val context = new SparkContext(conf)

    // Ensure the context is stopped even if the job throws,
    // so the local Spark resources are always released.
    try {
      // Key-value source data, explicitly split across 2 partitions.
      val rdd1 = context.makeRDD(List(
        ("a", 1), ("a", 2), ("a", 3), ("b", 4)), 2)

      // Combine values of identical keys. reduceByKey pre-aggregates on the
      // map side before the shuffle, which is why it is preferred over
      // groupByKey + map for associative reductions like a sum.
      val reRdd: RDD[(String, Int)] = rdd1.reduceByKey(_ + _)

      // Transformations are lazy: without an action no job ever runs and the
      // original demo produced no output. collect() triggers execution and
      // brings the (small) result to the driver for printing.
      reRdd.collect().foreach(println)
    } finally {
      context.stop()
    }
  }
}
