package value

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object groupbyTest {
  /** Demonstrates `groupBy` on an RDD and inspects which partition each group lands in. */
  def main(args: Array[String]): Unit = {
    // 1. Spark configuration: set the app name and run locally on all cores.
    val sparkConf = new SparkConf().setAppName("SparkCoreTest").setMaster("local[*]")

    // 2. SparkContext is the entry point for submitting a Spark application.
    val sparkContext = new SparkContext(sparkConf)

    // Source data: integers 1..10 spread across 2 partitions.
    val numbers: RDD[Int] = sparkContext.makeRDD(1 to 10, 2)

    // Group the elements by their remainder modulo 4.
    val grouped: RDD[(Int, Iterable[Int])] = numbers.groupBy(_ % 4)

    // Tag each (key, group) pair with the index of the partition holding it,
    // so the shuffle placement is visible in the printed output.
    val withPartitionIndex: RDD[(Int, (Int, Iterable[Int]))] =
      grouped.mapPartitionsWithIndex { (partitionIndex, entries) =>
        entries.map(entry => (partitionIndex, entry))
      }

    withPartitionIndex.collect().foreach(println)

    // Shut down the Spark context.
    sparkContext.stop()

    /** Expected output:
     * (0,(0,CompactBuffer(4, 8)))
     * (0,(2,CompactBuffer(2, 6, 10)))
     * (1,(1,CompactBuffer(1, 5, 9)))
     * (1,(3,CompactBuffer(3, 7)))*/
  }

}
