package org.example
import org.apache.spark.sql.SparkSession
/** Demo of pair-RDD operations (outer joins, combineByKey, aggregateByKey)
  * on two small keyed datasets, run on a local Spark master.
  */
object data2 {
  def main(args: Array[String]): Unit = {
    // Local session using all available cores.
    val spark = SparkSession.builder
      .master("local[*]")
      .appName("spark")
      .getOrCreate()
    val sc = spark.sparkContext

    // Two keyed datasets sharing keys; "张三" appears twice in each,
    // so joins on that key produce a cross of the matching values.
    val salariesA = sc.makeRDD(List(("张三", 2000), ("李四", 2500), ("王五", 5000), ("张三", 3600)))
    val salariesB = sc.makeRDD(List(("张三", 3600), ("李四", 3500), ("王五", 6000), ("张三", 4600)))

    // Alternative aggregations kept for reference:
    // salariesA.reduceByKey((x, y) => x + y).foreach(println)
    // salariesA.groupByKey().foreach(println)
    // salariesA.join(salariesB).foreach(println)

    // Outer joins: the optional side is None when a key is unmatched.
    salariesA.leftOuterJoin(salariesB).foreach(println)
    salariesA.rightOuterJoin(salariesB).foreach(println)

    // combineByKey: accumulate a (sum, count) pair per key.
    salariesA.combineByKey(
      (v: Int) => (v, 1),                                    // createCombiner: first value of a key
      (acc: (Int, Int), v: Int) => (acc._1 + v, acc._2 + 1), // mergeValue: fold value into partition accumulator
      (a: (Int, Int), b: (Int, Int)) => (a._1 + b._1, a._2 + b._2) // mergeCombiners: combine across partitions
    ).foreach(println)

    // aggregateByKey: per-partition max (seeded with 0), then sum the partition maxima.
    salariesA.aggregateByKey(0)(math.max(_, _), _ + _).foreach(println)

    sc.stop()
  }
}

