package com.yjjxt

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demo driver for `RDD.cogroup`: builds two small (key, value) RDDs from
 * space-separated strings and groups the values of both RDDs by key.
 *
 * For each key present in either RDD, `cogroup` yields a pair of
 * `Iterable`s — the values from the left RDD and from the right RDD.
 */
object Hello12Cogroup {
  def main(args: Array[String]): Unit = {
    val sparkContext = new SparkContext(new SparkConf().setMaster("local").setAppName("Join" + System.currentTimeMillis()))
    try {
      val array1 = Array[String]("Hello1 user11", "Hello1 user12", "Hello user11", "Hello user12", "user1 apple1")
      val array2 = Array[String]("Hello1 user21", "Hello1 user22", "Hello user21", "Hello user22", "user2 apple2")
      // Deliberately different partition counts to show how cogroup picks
      // the partitioning of the result.
      val lines1 = sparkContext.parallelize(array1, 3)
      val lines2 = sparkContext.parallelize(array2, 4)
      // Split each line once (the original split twice per element) and use
      // the first token as the key, the second as the value.
      val words1 = lines1.map { ele =>
        val parts = ele.split(" ")
        (parts(0), parts(1))
      }
      val words2 = lines2.map { ele =>
        val parts = ele.split(" ")
        (parts(0), parts(1))
      }

      // Perform the cogroup (full outer grouping by key).
      val words: RDD[(String, (Iterable[String], Iterable[String]))] = words1.cogroup(words2)
      println("分区数=》" + words.getNumPartitions)

      // NOTE: foreach runs on the executors; with master "local" the output
      // is visible in this process's console.
      words.foreach(ele => {
        println("key------------------" + ele._1)
        println("v1----" + ele._2._1.toList)
        println("v2----" + ele._2._2.toList)
      })
    } finally {
      // Always release the SparkContext — the original leaked it on exit.
      sparkContext.stop()
    }
  }
}
