package com.shujia.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{Partitioner, SPARK_BRANCH, SparkConf, SparkContext}

/**
 * Demo: counting students per class with a custom [[Partitioner]].
 *
 * Reads "data/students.txt" (CSV: id,name,age,gender,clazz), maps each row to
 * (clazz, 1), reduces by key using [[MyPartition]] to control which output
 * partition each class lands in, and writes the counts to "data/clazz_num".
 */
object Demo11Partitioner {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setMaster("local")
      .setAppName("Demo11Partitioner")

    val sc = new SparkContext(conf)
    try {
      val studentsRDD: RDD[String] = sc.textFile("data/students.txt")

      // collect (instead of map) silently skips malformed lines that do not
      // split into exactly 5 fields, rather than failing the task with a MatchError
      val kvRDD: RDD[(String, Int)] = studentsRDD
        .map(_.split(","))
        .collect { case Array(_, _, _, _, clazz) => (clazz, 1) }

      // use the custom partitioner so the shuffle output layout is controlled by MyPartition
      val countRDD: RDD[(String, Int)] = kvRDD.reduceByKey(new MyPartition(2), _ + _)

      countRDD.saveAsTextFile("data/clazz_num")
    } finally {
      // always release the SparkContext, even if the job fails
      sc.stop()
    }
  }
}

/**
 * Custom partitioner that routes keys by class-name prefix:
 * keys starting with "文科" (liberal arts) go to partition 0,
 * keys starting with "理科" (science) go to partition 1.
 *
 * @param num total number of partitions (expected to be at least 2)
 */
class MyPartition(num: Int) extends Partitioner {
  // total number of partitions
  override def numPartitions: Int = num

  // Decide which partition a key belongs to.
  // startsWith is safe for keys shorter than 2 chars, unlike substring(0, 2),
  // and the trailing fallback avoids a MatchError on unexpected keys —
  // getPartition must return a valid partition id for EVERY key.
  override def getPartition(key: Any): Int = {
    val clazz: String = key.toString
    if (clazz.startsWith("文科")) 0
    else if (clazz.startsWith("理科")) 1
    else 0 // unknown prefix: route to partition 0 rather than failing the task
  }

  // equals/hashCode let Spark detect that two RDDs are partitioned the same
  // way and skip unnecessary shuffles (recommended for custom Partitioners)
  override def equals(other: Any): Boolean = other match {
    case p: MyPartition => p.numPartitions == numPartitions
    case _              => false
  }

  override def hashCode(): Int = numPartitions
}
