package com.gin.spark

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object L02_Union {

  /*
  Dataset-oriented RDD operations, grouped by family:
  1. Non-aggregating, takes a function: map, flatMap
  2. Single-element, no function argument: union, cartesian
  3. Key-value element, no function argument: cogroup, join
  4. Sorting
  5. Aggregating, takes a function: reduceByKey (built on combineByKey)
  * */
  def main(args: Array[String]): Unit = {
    // Load the configuration and obtain the Spark context.
    // App name matches this object so the job is identifiable in the UI.
    val conf = new SparkConf().setMaster("local").setAppName("L02_Union")
    val sc = new SparkContext(conf)
    // Only print error-level logs to keep demo output readable.
    sc.setLogLevel("ERROR")

    try {
      // Data sources
      val data1RDD: RDD[Int] = sc.parallelize(List(1, 2, 3))
      val data2RDD: RDD[Int] = sc.parallelize(List(2, 3, 4))
      println(s"data1RDD partitions=${data1RDD.getNumPartitions}")
      println(s"data2RDD partitions=${data2RDD.getNumPartitions}")

      // union: logically concatenates the two RDDs (no shuffle);
      // the result's partition count is the sum of both inputs'.
      val unionRes: RDD[Int] = data1RDD.union(data2RDD)
      println(s"unionRes partitions=${unionRes.getNumPartitions}")
      unionRes.foreach(println)
      println("-------- union --------")
      println()

      // Cartesian product: pairs every element of data1 with every element of data2.
      val cartesianRes: RDD[(Int, Int)] = data1RDD.cartesian(data2RDD)
      cartesianRes.foreach(println)
      println("-------- cartesian --------")
      println()

      // Intersection of the two datasets.
      val intersectionRes: RDD[Int] = data1RDD.intersection(data2RDD)
      intersectionRes.foreach(println)
      println("-------- intersection --------")
      println()

      // Set difference — directional: elements of data1 not present in data2.
      val subtractRes: RDD[Int] = data1RDD.subtract(data2RDD)
      subtractRes.foreach(println)
      println("-------- subtract --------")
      println()

      val kv1RDD: RDD[(String, Int)] = sc.parallelize(List(
        ("P1", 11),
        ("P1", 12),
        ("P2", 21),
        ("P3", 31)
      ))
      val kv2RDD: RDD[(String, Int)] = sc.parallelize(List(
        ("P1", 13),
        ("P1", 14),
        ("P3", 32),
        ("P4", 41)
      ))

      // cogroup: groups values from both RDDs by key into a pair of Iterables.
      val cogroupRDD: RDD[(String, (Iterable[Int], Iterable[Int]))] = kv1RDD.cogroup(kv2RDD)
      cogroupRDD.foreach(println)
      println("-------- cogroup --------")
      println()

      // join: implemented on top of cogroup, then flattened with flatMap —
      // emits one pair per matching value combination.
      val joinRDD: RDD[(String, (Int, Int))] = kv1RDD.join(kv2RDD)
      joinRDD.foreach(println)
      println("-------- join --------")
      println()

      // Left outer join (based on cogroup): right side is Option-wrapped.
      val leftOuterJoinRDD: RDD[(String, (Int, Option[Int]))] = kv1RDD.leftOuterJoin(kv2RDD)
      leftOuterJoinRDD.foreach(println)
      println("-------- leftOuterJoin --------")
      println()

      // Right outer join (based on cogroup): left side is Option-wrapped.
      val rightOuterJoinRDD: RDD[(String, (Option[Int], Int))] = kv1RDD.rightOuterJoin(kv2RDD)
      rightOuterJoinRDD.foreach(println)
      println("-------- rightOuterJoin --------")
      println()

      // Full outer join (based on cogroup): both sides Option-wrapped.
      val fullOuterJoinRDD: RDD[(String, (Option[Int], Option[Int]))] = kv1RDD.fullOuterJoin(kv2RDD)
      fullOuterJoinRDD.foreach(println)
      println("-------- fullOuterJoin --------")
      println()
    } finally {
      // Always release the SparkContext, even if an action above fails;
      // the original version leaked it.
      sc.stop()
    }
  }

}
