package com.bclz.environment

import org.apache.spark.{ Partitioner, SparkConf, SparkContext}


/**
  * Partitions records by the first character of the key's year so that
  * `repartitionAndSortWithinPartitions` can perform the secondary sort
  * defined by [[EnvironmentKey]]'s ordering.
  *
  * NOTE(review): hashing only `year.charAt(0)` means all years sharing a
  * leading digit (e.g. every "2xxx" year) land in the same bucket — hash the
  * full year string if a finer spread is ever needed.
  *
  * @param nums number of partitions; must be positive (used as a modulus)
  */
class CustomPartitioner(nums: Int) extends Partitioner {
  // nums is used as a divisor in getPartition, so 0 would throw
  // ArithmeticException there; fail fast at construction instead.
  require(nums > 0, s"Number of partitions ($nums) must be positive.")

  override def numPartitions: Int = nums

  /**
    * Maps a key to a partition index in [0, numPartitions).
    *
    * @throws IllegalArgumentException if the key is not an EnvironmentKey
    */
  override def getPartition(key: Any): Int = key match {
    // (hashCode & Int.MaxValue) is already non-negative, so the modulo result
    // is in range without the redundant Math.abs the original wrapped it in.
    case e: EnvironmentKey => (e.year.charAt(0).hashCode & Int.MaxValue) % numPartitions
    case _                 => throw new IllegalArgumentException("required  EnvironmentKey Class !!!")
  }

  /** Two partitioners are equal iff they produce the same partitioning. */
  override def equals(obj: Any): Boolean = obj match {
    case p: CustomPartitioner => p.numPartitions == this.numPartitions
    case _                    => false
  }

  // equals was previously overridden without hashCode, violating the
  // Object contract (equal partitioners must have equal hash codes).
  override def hashCode: Int = numPartitions
}

/**
  * Secondary sort.
  *
  * Spark job: reads environment temperature records in the form
  * (year,month,date,temperature) and writes "year-month&lt;TAB&gt;temperature"
  * lines, partitioned by [[CustomPartitioner]] and sorted within each
  * partition by year+month and then by temperature (the ordering is
  * supplied by [[EnvironmentKey]]).
  */
object EnvironmentStatistic {

  /**
    * Job entry point.
    *
    * @param args args(0) = input directory, args(1) = output directory
    * @throws IllegalArgumentException if exactly two arguments are not supplied
    */
  def main(args: Array[String]): Unit = {
    if (args.length != 2)
      throw new IllegalArgumentException("Usage: EnvironmentApplication [inputdir] [outputdir]")

    val conf = new SparkConf().setAppName("ENVIRONMENT STATISTIC")
    val sc   = new SparkContext(conf)
    try {
      sc.textFile(args(0))
        .mapPartitions(_.map { line =>
          // Input line layout: year,month,date,temperature
          val fields = line.split(",")
          // The key carries (year, month, temperature) so the shuffle sorts by
          // year+month then temperature; the value is the raw temperature text.
          (new EnvironmentKey(fields(0), fields(1), fields(3).toInt), fields(3))
        })
        // Partition by key and sort each partition during the shuffle.
        .repartitionAndSortWithinPartitions(new CustomPartitioner(2))
        .mapPartitions(_.map { case (key, temp) => s"${key.year}-${key.month}\t$temp" })
        .saveAsTextFile(args(1))
    } finally {
      // Previously stop() was skipped on failure; always release the context.
      sc.stop()
    }
  }

}
