package com.offcn.spark.p4

import org.apache.spark.{Partitioner, SparkConf, SparkContext}

import scala.collection.mutable

/**
 * @Author: BigData-LGW
 * @ClassName: PartitionFinal
 * @Date: 2020/12/8 19:43
 * @Description: Groups (course, info) score records into one output partition
 *               per course using a custom Spark Partitioner.
 * @Version: 1.0
 */
object PartitionFinal {
    /**
     * Entry point: parses hard-coded "course name score" lines into
     * (course, info) pairs, repartitions them with a custom [[MyGroup]]
     * partitioner so each course lands in its own partition, and writes
     * the partitions out as text files.
     */
    def main(args: Array[String]): Unit = {
        val conf = new SparkConf()
            .setAppName("PartitionFinal")
            .setMaster("local[2]")
        val sc = new SparkContext(conf)

        val records = sc.parallelize(List(
            "chinese ls 91",
            "english ww 56",
            "chinese zs 90",
            "math zl 76",
            "english zq 88",
            "chinese wb 95",
            "chinese sj 74",
            "english ts 87",
            "math ys 67",
            "english mz 77",
            "chinese yj 98",
            "english gk 96",
            "math zq 88",
            "chinese wb 95",
            "math sj 74",
            "english ts 87",
            "math ys 67",
            "english mz 77",
            "math yj 98",
            "english gk 96"
        ))
        // Split each line at its first space: key = course, value = the remainder.
        val courseToInfo = records.map { line =>
            val firstSpace = line.indexOf(" ")
            (line.substring(0, firstSpace), line.substring(firstSpace + 1))
        }
        // Distinct course names drive the number of partitions in MyGroup.
        val courseNames = courseToInfo.keys.distinct().collect()
        courseToInfo
            .partitionBy(new MyGroup(courseNames))
            .saveAsTextFile("file:/D:/tmp/spark/partition/out")
        sc.stop()
    }
}
/**
 * Custom Spark Partitioner that maps each key to its own partition, in the
 * order the keys appear in `keys`.
 *
 * @param keys distinct keys; one partition is created per key.
 *             NOTE(review): an empty array yields numPartitions == 0, which
 *             Spark rejects — callers should ensure keys is non-empty.
 */
class MyGroup(keys: Array[String]) extends Partitioner{
    // Key -> partition-index lookup, built once at construction time.
    // zipWithIndex replaces the original index-based for loop.
    val key2Partition: mutable.Map[String, Int] = mutable.Map(keys.zipWithIndex: _*)

    override def numPartitions: Int = keys.length

    /** Returns the partition for `key`; unknown keys fall back to partition 0. */
    override def getPartition(key: Any): Int = {
        key2Partition.getOrElse(key.toString,0)
    }
}