package com.atguigu.sparkcore.day02

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Author atguigu
 * Date 2020/10/28 9:44
 */
/**
 * Demo of the RDD zip family of operations.
 *
 * Builds two integer RDDs with 2 partitions each and runs `zipWithUniqueId`
 * on the first one; the commented-out snippets document the behavior (and
 * restrictions) of `zip`, `zipWithIndex` and `zipPartitions` for reference.
 */
object ZipDemo {
    def main(args: Array[String]): Unit = {
        // App name previously said "DoubleValueDemo" (copy-paste from another demo);
        // use this object's name so the job is identifiable in the Spark UI.
        val conf: SparkConf = new SparkConf().setAppName("ZipDemo").setMaster("local[2]")
        val sc: SparkContext = new SparkContext(conf)
        val list1 = List(30, 50, 70, 60, 10, 20)
        val list2 = List(3, 5, 7, 6, 1, 2, 0, 10, 20)
        val rdd1: RDD[Int] = sc.parallelize(list1, 2)
        val rdd2: RDD[Int] = sc.parallelize(list2, 2)

        // rdd1.zip(rdd2) would fail here: zip requires equal partition counts
        // AND equal element counts per partition (list2 has more elements).
        //        val rdd3 = rdd1.zip(rdd2)
//        val rdd3: RDD[(Int, Long)] = rdd1.zipWithIndex()
//        println(list1.zip(list2))
//        println(list1.zipAll(list2, "100", "200"))

//        val rdd3 = rdd1.zip(rdd2)

        // zipPartitions only requires the two RDDs to have the same number of
        // partitions; the per-partition element counts may differ.
        /*val rdd3 = rdd1.zipPartitions(rdd2)((it1, it2) => {
//            it1.zip(it2)
//            it1.zipAll(it2, 100, 200)
            it1.zipWithIndex.zip(it2.zipWithIndex)
        }).map{
            case ((a, i1), (b, i2)) => (a, i1, b, i2)
        }*/

        // zipWithUniqueId assigns ids of the form k*numPartitions + partitionIndex;
        // unlike zipWithIndex it does not trigger an extra job.
        val rdd3: RDD[(Int, Long)] = rdd1.zipWithUniqueId()

        rdd3.collect.foreach(println)

        sc.stop()
    }
}
/*
Restrictions of zip:
1. Corresponding partitions must contain the same number of elements.
2. The two RDDs must have the same number of partitions.
(zipPartitions relaxes restriction 1; it only requires equal partition counts.)
 */