package com.dtkavin.spark.partation

import org.apache.spark.{SparkContext, SparkConf, Partitioner}

/**
  * Demo custom [[org.apache.spark.Partitioner]] (toy algorithm, for experimentation only).
  *
  * Routes any key equal to `str` to partition 1 and every other key (including
  * `null`) to partition 0.
  *
  * @param str     the key value that should be isolated into its own partition
  * @param partNum total number of partitions; must be positive (default 2)
  */
case class SelfPartition(str: String, partNum: Int = 2) extends Partitioner {
  // Fail fast on an invalid partition count instead of producing a broken shuffle.
  require(partNum > 0, s"partNum must be positive, got $partNum")

  override def numPartitions: Int = partNum

  override def getPartition(key: Any): Int = {
    key match {
      case null => 0
      // Only route to partition 1 when it actually exists; the Partitioner
      // contract requires the result to lie in [0, numPartitions).
      case k if k == str && partNum > 1 => 1
      case _ => 0
    }
  }
}

object SerfPartitionDemo {
  /**
    * Demo entry point: partitions a small pair RDD with [[SelfPartition]]
    * and writes the result as text files.
    */
  def main(args: Array[String]): Unit = {
    // Configure master/appName through SparkConf; the
    // SparkContext(master, appName, conf) constructor is deprecated.
    val conf = new SparkConf().setMaster("local").setAppName("PartitionDemo")
    val sc = new SparkContext(conf)
    try {
      // 3 initial partitions; partitionBy reshuffles them via SelfPartition.
      val data = sc.parallelize(List("aaa" -> 1, "aab" -> 2, "bbb" -> 3), 3)
      data.partitionBy(SelfPartition("aaa")).saveAsTextFile("D:\\data\\output-test")
    } finally {
      // Ensure the context is released even if the job fails.
      sc.stop()
    }
  }
}