package weibo

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
//
/**
 * Spark batch job: counts the number of posts per sender in weibo log data.
 *
 * Input lines are tab-separated; field 0 is the sender id. The job emits
 * `(senderId, postCount)` pairs via `saveAsTextFile`.
 *
 * Usage: AverageSend [inputPath] [outputPath]
 * When an argument is omitted, the local default path is used instead
 * (previously the defaults were dead locals and missing args crashed with
 * ArrayIndexOutOfBoundsException).
 */
object AverageSend {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setAppName("AverageSend")
    val sc: SparkContext = new SparkContext(conf)

    // Explicit CLI args take precedence, as before; defaults are the fallback.
    val inputPath: String = args.lift(0).getOrElse("file/WeiBoData/input")
    val outputPath: String = args.lift(1).getOrElse("file/WeiBoData/output/TopSend")

    try {
      // One record per post: (senderId, 1), sender being the first TSV field.
      val sendCounts: RDD[(String, Int)] = sc.textFile(inputPath)
        .map(line => (line.split("\t")(0), 1))
        .reduceByKey(_ + _) // total posts per sender

      sendCounts.saveAsTextFile(outputPath)
    } finally {
      // Release the SparkContext even when the job fails.
      sc.stop()
    }
  }
}
