package com.gy.spark.core

import org.apache.spark.{SparkConf, SparkContext}

object Day02 {

  /**
   * Spark-core demo: exercises partition-wise RDD operations —
   * mapPartitions, mapPartitionsWithIndex, intersection, countByKey
   * and zipWithIndex — printing each result to stdout.
   */
  def main(args: Array[String]): Unit = {
    // getSimpleName on a Scala `object` yields "Day02$"; strip the
    // compiler-generated '$' so the Spark UI shows a clean app name.
    val conf: SparkConf = new SparkConf()
      .setAppName(Day02.getClass.getSimpleName.stripSuffix("$"))
      .setMaster("local")
    val sc = new SparkContext(conf)

    // Key/value pairs spread over 3 partitions; contains duplicates on purpose
    // so that intersection/countByKey below have something interesting to show.
    val rdd = sc.parallelize(Array(
      ("zhangsan", 18),
      ("lisi", 19),
      ("wangwu", 20),
      ("zhangsan", 18),
      ("lisi", 19),
      ("wangwu", 20),
      ("maliu", 21)),
      3)

    // Append "_" to each element's string form, one partition at a time.
    // s"${x}_" replaces the deprecated `x + "_"` (any2stringadd) on tuples,
    // and the iterator returned by map needs no extra .toIterator.
    rdd.mapPartitions(_.map(x => s"${x}_")).foreachPartition(_.foreach(println))

    // Tag every element with the index of the partition it resides in.
    rdd.mapPartitionsWithIndex((index, iter) =>
      iter.map(x => s"${x}:${index}")
    ).foreachPartition(_.foreach(println))

    val rdd2 = sc.makeRDD(Array(("zhangsan", 18), ("lisi", 19), ("wangwu", 20), ("maliu1", 21)))

    // Elements present in BOTH rdds; note "maliu1" vs "maliu" do not match,
    // and intersection de-duplicates its output.
    rdd2.intersection(rdd).foreachPartition(_.foreach(println))

    val rdd3 = sc.parallelize(Array(
      (1, "a"), (2, "b"), (3, "c"), (3, "d")
    ), 3)

    // Number of elements per key, collected to the driver as a local Map.
    rdd3.countByKey().foreach(println)

    // zipWithIndex pairs each element with its global position across
    // partitions; destructure the nested tuple instead of _._1._1 chains.
    rdd3.zipWithIndex().foreach { case ((k, v), idx) =>
      println(s"${k}:${v}:${idx}")
    }

    sc.stop()
  }

}
