package com.bdqn.spark.chapter05.kv

import org.apache.spark.rdd.RDD
import org.apache.spark.{HashPartitioner, SparkConf, SparkContext}

object Spark17_RDD_Operator_Transform {

  /** Demonstrates redistributing a pair RDD across partitions with
    * [[org.apache.spark.HashPartitioner]] via `partitionBy`, then saving
    * each partition as a text file.
    *
    * @param args optional: first argument overrides the output directory
    *             (defaults to "output" for backward compatibility)
    */
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("operator-partitionBy")
    val sc = new SparkContext(sparkConf)

    try {
      // Source RDD split across 2 partitions: (1 2) (3 4)
      val sourceRDD: RDD[Int] = sc.makeRDD(List(1, 2, 3, 4), 2)

      // Pair each element with 1. `partitionBy` is not defined on RDD[Int];
      // it becomes available on RDD[(K, V)] through the implicit conversion
      // to PairRDDFunctions (resolved at compile time).
      val kvRDD: RDD[(Int, Int)] = sourceRDD.map((_, 1))

      // Redistribute records by key hash across 2 partitions.
      // NOTE(review): saveAsTextFile throws if the target directory already
      // exists — delete it (or pass a fresh path as args(0)) before re-running.
      val outputPath = args.headOption.getOrElse("output")
      kvRDD.partitionBy(new HashPartitioner(2)).saveAsTextFile(outputPath)
    } finally {
      // Ensure the SparkContext is released even if the job above fails
      // (the original leaked it on any exception before sc.stop()).
      sc.stop()
    }
  }
}
