package com.shujia.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{Partitioner, SparkConf, SparkContext}

/**
  * Demonstrates routing records to specific output files with a custom
  * [[Partitioner]]: keys whose class field starts with "文科" (arts) land in
  * partition 0 and everything else in partition 1, so each stream ends up in
  * its own part file under data/clazz.
  */
object Demo14Studentpartition {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf()
      .setMaster("local")
      .setAppName("partition")

    val sc = new SparkContext(conf)

    try {
      // 2 is only a minimum hint for the number of input splits.
      val student: RDD[String] = sc.textFile("data/students.txt", 2)

      println("student分区数据：" + student.getNumPartitions)

      /**
        * Save arts and science students to different files.
        * The whole line is used as the grouping key; ClassPartition inspects
        * the class field of that key to choose the target partition.
        */
      val clazzPartitionRDD: RDD[(String, Iterable[String])] =
        student.groupBy((stu: String) => stu, new ClassPartition)

      println("clazzPartionRDD分区数：" + clazzPartitionRDD.getNumPartitions)

      // Keep only the key (the original input line); each partition of the
      // shuffled RDD is written to its own part-xxxxx file.
      clazzPartitionRDD.map(_._1).saveAsTextFile("data/clazz")
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }

}

/**
  * Two-way partitioner keyed on the class field of a student record line
  * (comma-separated; field index 4 is assumed to hold the class name —
  * TODO confirm against data/students.txt).
  * Arts classes (prefix "文科") go to partition 0, everything else to 1.
  */
class ClassPartition extends Partitioner {
  override def numPartitions: Int = 2

  override def getPartition(key: Any): Int = {
    // lift(4) returns None for malformed lines with fewer than 5 fields,
    // routing them to partition 1 instead of failing the whole task with
    // an ArrayIndexOutOfBoundsException. Well-formed keys behave as before.
    val clazz: Option[String] = key.toString.split(",").lift(4)

    if (clazz.exists(_.startsWith("文科"))) 0 else 1
  }
}
