package org.example

import org.apache.spark.sql.SparkSession

/**
 * Small Spark driver demonstrating pair-RDD join operations
 * (leftOuterJoin / rightOuterJoin) on two (name, score) datasets.
 *
 * Runs locally with all available cores; prints each joined record.
 */
object sparkData {
  def main(args: Array[String]): Unit = {

    // An appName is REQUIRED: without it getOrCreate() throws
    // "An application name must be set in your configuration".
    val spark = SparkSession
      .builder()
      .appName("sparkData")
      .master("local[*]")
      .getOrCreate()
    val sc = spark.sparkContext

    try {
      // Two (name, score) datasets; "张三" appears twice in each,
      // so joins produce a row per matching pair of occurrences.
      val score = sc.makeRDD(List(("张三",80),("李四",72),("王五",88),("张三",77)))
      val score2 = sc.makeRDD(List(("张三",85),("赵四",79),("王五",88),("张三",78)))

      // Other pair-RDD operations kept for reference:
      //    score.reduceByKey((x, y) => x + y).foreach(println)
      //    score.groupByKey().foreach(println)
      //    score.join(score2).foreach(println)

      // Left outer join: every key from `score`, Option value from `score2`.
      score.leftOuterJoin(score2).foreach(println)
      // Right outer join: every key from `score2`, Option value from `score`.
      score.rightOuterJoin(score2).foreach(println)

      // Example of computing per-key (sum, count) with combineByKey
      // (kept as reference; original snippet was incomplete):
      //    score.combineByKey(
      //      v => (v, 1),                                  // create combiner
      //      (t: (Int, Int), v: Int) => (t._1 + v, t._2 + 1),  // merge value
      //      (t1: (Int, Int), t2: (Int, Int)) => (t1._1 + t2._1, t1._2 + t2._2) // merge combiners
      //    ).foreach(println)
    } finally {
      // Always release the SparkSession (and its SparkContext) on exit.
      spark.stop()
    }
  }
}
