package test

import org.apache.spark.sql.SparkSession

object ee {

  // Local Spark session for this demo; `local[*]` uses all available cores.
  // NOTE(review): running jobs in an object initializer means they execute on
  // first reference to `ee`; kept as-is for compatibility with existing callers.
  val spark = SparkSession
    .builder()
    .master("local[*]")
    .getOrCreate()
  val sc = spark.sparkContext

  // Key/value pairs: (name, score). Duplicate keys ("张三") are intentional
  // so the per-key aggregations below have something to combine.
  val score = sc.makeRDD(List(("张三",80),("里斯",72),("王五",88),("张三",77)))
  val score2 = sc.makeRDD(List(("张三",80),("里斯",72),("五",88),("张三",77)))

  // Aggregation: sum the scores for each key.
  score.reduceByKey((x,y) => x+y).foreach(println)

  // Group the raw values themselves per key: (key, Iterable[Int]).
  score.groupByKey().foreach(println)

  // Inner join: only keys present in BOTH RDDs are matched; others are dropped.
  //score.join(score2).foreach(println)

  // Outer joins, driven by the left/right side respectively:
  // unmatched keys from the other side appear as None, matches as Some(v).
  score.leftOuterJoin(score2).foreach(println)
  score.rightOuterJoin(score2).foreach(println)

  // combineByKey: build a (sum, count) pair per key, then print the average.
  //
  // BUG FIX: the mergeValue step previously returned (t._1 + v, t._2 + v),
  // adding the score to the COUNT instead of incrementing it by 1, which
  // corrupted every per-key count. The correct step is (t._1 + v, t._2 + 1).
  //
  // Also fixed: combineByKey is a lazy transformation and its result was
  // never consumed by an action, so it previously never executed at all.
  // The map + foreach below materializes the per-key average.
  score
    .combineByKey(
      (v: Int) => (v, 1),                              // createCombiner: first value seen for a key
      (t: (Int, Int), v: Int) => (t._1 + v, t._2 + 1), // mergeValue: add score to sum, bump count by 1
      (t1: (Int, Int), t2: (Int, Int)) =>              // mergeCombiners: merge partial (sum, count) pairs
        (t1._1 + t2._1, t1._2 + t2._2)
    )
    .map { case (name, (sum, cnt)) => (name, sum.toDouble / cnt) } // per-key average score
    .foreach(println)

  spark.stop()
}
