package com.atguigu1.core.operator

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 *
 * @description: Double-RDD operations. Intersection, subtraction and union require both
 *               RDDs to have the same element type (a mismatch is a compile error);
 *               zip allows different element types.
 * @time: 2021-03-12 11:45
 * @author: baojinlong
 **/
object Spark17DobuleValueRdd {

  /**
   * Demonstrates binary (two-source) RDD transformations on a local Spark context:
   * intersection, union, subtract and zip. The set-style operations require both
   * RDDs to share the same element type; zip additionally requires both sources to
   * have the same number of partitions and the same number of elements per
   * partition ("Can only zip RDDs with same number of elements in each partition").
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    // Local master using all cores; both demo RDDs are built with 2 partitions.
    val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("rdd")
    val sc              = new SparkContext(conf)

    val left: RDD[Int]  = sc.makeRDD(Seq(1, 2, 3, 4), 2)
    val right: RDD[Int] = sc.makeRDD(Seq(3, 4, 6, 5), 2)

    // Collect an RDD to the driver and print it as a comma-separated line.
    def show[T](rdd: RDD[T]): Unit = println(rdd.collect.mkString(","))

    // Intersection: elements present in both sources.
    show(left.intersection(right))
    // Union: concatenation of both sources.
    show(left.union(right))
    // Subtract: elements of `left` not present in `right`.
    show(left.subtract(right))
    // Zip: element-wise pairing into RDD[(Int, Int)].
    show(left.zip(right))

    sc.stop()
  }
}
