package com.inspur2

import org.apache.spark.{Partitioner, SparkConf, SparkContext}

object ewDivisionChampions { // Championship-count statistics, partitioned by East/West division

  class MySparkPartition(numParts: Int) extends Partitioner {

    /** Total number of partitions this partitioner produces. */
    override def numPartitions: Int = numParts

    /**
      * Custom partition routing: "East" keys go to partition 0,
      * "West" keys to partition 1, and any other key to partition 2.
      *
      * @param key the record key (expected to be a division name)
      * @return the target partition index
      */
    override def getPartition(key: Any): Int = key match {
      case "East" => 0
      case "West" => 1
      case _      => 2
    }

    /** Two partitioners are equal iff they produce the same partition count. */
    override def equals(other: Any): Boolean = other match {
      case that: MySparkPartition => that.numPartitions == numPartitions
      case _                      => false
    }

    /** Kept consistent with equals: hash on the partition count. */
    override def hashCode: Int = numPartitions

  }

  def main(args: Array[String]): Unit = {
    // Local mode for development; swap in the cluster master URL for production runs.
    val conf = new SparkConf()
      //      .setMaster("spark://192.168.66.88:7077")
      .setMaster("local")
      .setAppName("ewDivision Champions count")
    val sc = new SparkContext(conf)

    // Pre-cleaned championship records, one CSV row per line.
    // Assumed field layout: (0) year, (2) team, (5) division ("East"/"West")
    // — TODO confirm against the upstream cleaning job.
    val champions = sc.textFile("hdfs://192.168.66.88:8020/0622/clear_data/part-00000")

    champions
      .map(_.split(","))
      .map { x =>
        val year = x(0).toInt
        // Rows before 1970 have no division field; tag them so the
        // x(5) access below never goes out of bounds.
        // (Build the array directly instead of chaining :+ onto an
        // empty Array, which allocated a new array per append.)
        if (year < 1970)
          Array(x(0), x(1), x(2), x(3), x(4), "year<1970")
        else
          x
      }
      .map(x => ((x(2), x(5)), 1))          // key by (team, division)
      .reduceByKey(_ + _)                    // championship count per (team, division)
      .sortBy(x => x._2, false)              // descending by count
      .map(x => (x._1._2, (x._1._1, x._2))) // re-key by division for custom partitioning
      .partitionBy(new MySparkPartition(3))  // East -> part 0, West -> part 1, other -> part 2
      .map(x => x._1 + "\t" + x._2._1 + "\t" + x._2._2)
//    .foreach(println)
      .saveAsTextFile("hdfs://192.168.66.88:8020/0622/nba_ewDivision_champions_number")
  }

}
