package com.bclz.cat

import org.apache.spark.{SparkConf, SparkContext}

/**
  * Spark job: topN aggregation over non-unique keys.
  *
  * Input lines are CSV: key,name,count. Counts are summed per key, then the
  * global topN records (by summed count) are written out, one CSV line each.
  */
object CatStatistic {

  /** Number of top records to keep when no TopN argument is supplied. */
  private val DefaultTopN = 10

  /**
    * Entry point.
    *
    * @param args args(0) = input directory, args(1) = output directory,
    *             args(2) = optional TopN (defaults to [[DefaultTopN]])
    */
  def main(args: Array[String]): Unit = {

    if (args.length < 2) {
      println("Usage: CatApplication [inputdir] [outputdir] {TopN}")
    } else {
      // val, not var: the closure below captures an immutable value.
      // BUG FIX: topN was previously parsed but never used — the per-partition
      // trim hard-coded take(10), silently ignoring the TopN argument.
      val topN = if (args.length >= 3) args(2).toInt else DefaultTopN

      val conf = new SparkConf()
      conf.setAppName("CAT STATISTIC")
//    conf.setMaster("local[*]")
      val sc = new SparkContext(conf)

      try {
        sc.textFile(args(0))
          .map { line =>
            // Expected line shape: key,name,count
            val fields = line.trim.split(",")
            (fields(0), (fields(1), fields(2).toInt))
          }
          // Merge duplicate keys by summing counts; keep the first name seen.
          .reduceByKey((a, b) => (a._1, a._2 + b._2))
          .mapPartitions { records =>
            // Sort within each partition and keep only topN, so the global sort
            // below sees at most (numPartitions * topN) records instead of all
            // data (avoids pulling everything into one partition's memory).
            // BUG FIX: direct `>` comparison instead of the original
            // subtraction-based compare, which breaks on Int overflow.
            records.toList.sortWith(_._2._2 > _._2._2).take(topN).iterator
          }
          // Single partition so the output is globally ordered by count, desc.
          .sortBy(_._2._2, ascending = false, numPartitions = 1)
          // BUG FIX: the sorted partition still holds up to topN records per
          // input partition; trim to the global topN before writing.
          .mapPartitions(_.take(topN))
          .map { case (key, (name, count)) => s"$key,$name,$count" }
          .saveAsTextFile(args(1))
      } finally {
        // Always release the SparkContext, even if the job fails.
        sc.stop()
      }
    }
  }
}
