package org.wj.arithmetic

import org.apache.spark.rdd.RDD
import org.wj.config.LocalSparkContext

/**
 * Demo that observes how `cache()` and `checkpoint()` affect an RDD's
 * lineage, by printing the parent dependency (`dependencies.head.rdd`)
 * at four points: after the transformation, after `cache()`, after
 * `checkpoint()`, and after the first action has run.
 *
 * NOTE(review): the object is named SortByKey but actually demonstrates
 * `groupByKey`; renaming would break callers, so the name is kept.
 * `sc` is provided by the mixed-in LocalSparkContext trait — TODO confirm
 * its lifecycle management against that trait's definition.
 */
object SortByKey extends App with LocalSparkContext{

  // Checkpoint files are written under the working directory; acceptable
  // for a local demo, but checkpoint data is not cleaned up automatically.
  sc.setCheckpointDir("./")

  // Mixed value types (String and Int) deliberately widen the value type
  // to Any, matching the explicit RDD[(Int, Any)] annotation.
  private val rdd: RDD[(Int, Any)] = sc.parallelize(Array((1, "2"), (2, "3"), (3, 4)))
  private val value: RDD[(Int, Iterable[Any])] = rdd.groupByKey()

  // 1) Parent dependency right after the (lazy) groupByKey transformation.
  println(value.dependencies.head.rdd)

  value.cache()

  // 2) cache() only marks the RDD for persistence; the lineage printed
  //    here should be unchanged from the previous print.
  println(value.dependencies.head.rdd)

  value.checkpoint()

  // 3) checkpoint() is also lazy: nothing is materialized until an action
  //    runs, so the parent dependency is still the original one here.
  println(value.dependencies.head.rdd)

  // First action: triggers the job, populates the cache, and writes the
  // checkpoint. `keys` is the idiomatic form of map(t2 => t2._1).
  value.keys.foreach(println)

  // 4) After the action the lineage is truncated: the parent should now be
  //    the checkpointed RDD rather than the original shuffle chain.
  println(value.dependencies.head.rdd)

  // Release Spark resources. SparkContext.stop() is idempotent, so this is
  // safe even if LocalSparkContext also stops the context on shutdown.
  sc.stop()

}
