package com.yjjxt

import org.apache.spark.api.java.JavaRDD.fromRDD
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable.ListBuffer

/**
 * `zip` requires the two RDDs to have the same number of partitions AND
 * the same number of elements in each corresponding partition.
 *
 * `zipPartitions` only requires matching partition counts, so it can be
 * used to combine RDDs whose element counts differ.
 */
object Hello19Zip {
  /**
   * Demonstrates `RDD.zip` versus `RDD.zipPartitions`.
   *
   * Runs a local-mode Spark job that pairs elements of two same-sized RDDs
   * with `zip`, then pairs an RDD with a larger one via `zipPartitions`
   * (which only requires equal partition counts).
   */
  def main(args: Array[String]): Unit = {
    // Local-mode context; the app name is made unique with a timestamp.
    val sparkContext = new SparkContext(
      new SparkConf().setMaster("local").setAppName("Join" + System.currentTimeMillis()))

    val array1 = Array[String]("user1", "user2", "user3", "user4", "user5", "user6", "user7", "user8")
    val array2 = Array[String]("1", "2", "3", "4", "5", "6", "7", "8")
    val array3 = Array[String]("1", "2", "3", "4", "5", "6", "7", "8", "11", "22", "33", "44", "55", "66", "77", "88")
    // All RDDs use 4 partitions so that zip/zipPartitions partition counts line up.
    val lines1 = sparkContext.parallelize(array1, 4)
    val lines2 = sparkContext.parallelize(array2, 4)
    val lines3 = sparkContext.parallelize(array3, 4)

    // zip: both RDDs must have identical partition counts and per-partition sizes.
    val linesZip12: RDD[(String, String)] = lines1.zip(lines2)
    linesZip12.foreach(println)

    // zipPartitions: only the partition counts must match. Each pair of
    // per-partition iterators is zipped lazily, truncating to the shorter
    // side. (The previous manual index loop threw IndexOutOfBoundsException
    // whenever the second partition was shorter than the first.)
    val linesZip13: RDD[(String, String)] =
      lines1.zipPartitions(lines3)((x: Iterator[String], y: Iterator[String]) => x.zip(y))
    linesZip13.foreach(println)

    sparkContext.stop()
  }
}
