package com.yjjxt

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demo comparing `groupByKey` and `reduceByKey` on a tiny key/value RDD.
 *
 * For each approach the partition index and the record are printed, so the
 * console output shows how records are distributed across partitions after
 * the shuffle.
 */
object Hello20GroupByKey {
  def main(args: Array[String]): Unit = {
    val sparkContext = new SparkContext(
      new SparkConf().setMaster("local").setAppName("Join" + System.currentTimeMillis()))
    try {
      val array = Array[String]("user1 1", "user2 2", "user1 3", "user2 4", "user3 5", "user3 6")
      val lines = sparkContext.parallelize(array, 1)

      // Split each "key value" line once and build the pair.
      // (The original split the same string twice per record.)
      val pairs: RDD[(String, String)] = lines.map { x =>
        val parts = x.split(" ")
        (parts(0), parts(1))
      }

      // groupByKey: shuffles all values for a key to one partition (2 partitions here).
      val group: RDD[(String, Iterable[String])] = pairs.groupByKey(2)
      group.mapPartitionsWithIndex { (idx, iter) =>
        // Map lazily instead of draining the iterator in a while loop:
        // the original returned an already-exhausted iterator, so count() was 0.
        iter.map { kv =>
          println(s"$idx-group-$kv")
          kv
        }
      }.count()

      // reduceByKey: combines values per key map-side before shuffling.
      val reduce: RDD[(String, String)] = pairs.reduceByKey((x, y) => {
        x.concat("[" + y + "]")
      })
      reduce.mapPartitionsWithIndex { (idx, iter) =>
        // Same lazy-map pattern so the elements flow through to count().
        iter.map { kv =>
          println(s"$idx-reduce-$kv")
          kv
        }
      }.count()
    } finally {
      // Release Spark resources even if an action above throws.
      sparkContext.stop()
    }
  }
}
