package chapter03

import org.apache.log4j.{Level, Logger}
import org.apache.spark.{Partitioner, SparkConf, SparkContext}

object Test37_race {
  /**
   * Counts records per "year-month" key and writes the counts partitioned
   * by [[MyPar]].
   *
   * Input format: CSV lines whose 5th column (index 4) holds a
   * "year/month" value, e.g. `.. ,2021/1, ..`.
   *
   * @param args args(0) = input path, args(1) = output path
   */
  def main(args: Array[String]): Unit = {
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
    val conf = new SparkConf().setAppName("race")
    val sc = new SparkContext(conf)
    try {
      // Read the input file.
      val input = args(0)
      val lines = sc.textFile(input)
      // Split each CSV line, pull out the "year/month" column, and build
      // ("year-month", 1) pairs.
      val counts = lines
        .map(_.split(","))
        .map(fields => fields(4).split("/"))
        .map(ym => (ym(0) + "-" + ym(1), 1))
        // Supplying the partitioner here performs the aggregation and the
        // final partitioning in a single shuffle. The original code called
        // reduceByKey and then partitionBy(new MyPar), which shuffled the
        // data twice for the same result.
        .reduceByKey(new MyPar, _ + _)
      val output = args(1)
      counts.saveAsTextFile(output)
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }
}
/**
 * Two-way partitioner: keys "2021-1" and "2021-2" go to partition 1,
 * every other key goes to partition 0.
 *
 * `equals`/`hashCode` are overridden so Spark recognises any two MyPar
 * instances as the same partitioner and can skip redundant shuffles;
 * the default reference equality would force a re-shuffle every time a
 * fresh `new MyPar` is compared against an RDD's existing partitioner.
 */
class MyPar extends Partitioner {
  override def numPartitions: Int = 2

  override def getPartition(key: Any): Int = key match {
    // Both target months map to the same partition; merged into one case.
    case "2021-1" | "2021-2" => 1
    case _                   => 0
  }

  // All MyPar instances partition identically, so they are interchangeable.
  override def equals(other: Any): Boolean = other.isInstanceOf[MyPar]

  override def hashCode(): Int = numPartitions
}
