package weibo

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object BanKeyNum {

  /** Spark job: count, per user (first tab-separated field of each line),
    * the posts containing a banned keyword, then save the top 7 users
    * ranked by descending count.
    *
    * args(0) — input path  (optional; falls back to the default below)
    * args(1) — output path (optional; falls back to the default below)
    */
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setAppName("BanKeyNum")
    val sc: SparkContext = new SparkContext(conf)

    // Use CLI arguments when provided; otherwise fall back to the project
    // defaults (the original declared these paths but never used them and
    // crashed with ArrayIndexOutOfBoundsException when args were missing).
    val inputPath: String = args.lift(0).getOrElse("file/WeiBoData/input")
    val outputPath: String = args.lift(1).getOrElse("file/WeiBoData/output/BanKeyNum")

    try {
      val dataRdd: RDD[(String, Int)] = sc.textFile(inputPath)
        // Keep only lines containing either banned keyword.
        // `||` (short-circuit) replaces the original bitwise `|`.
        .filter(line => line.contains("傻逼") || line.contains("脑瘫"))
        // Key by the first tab field — presumably the user id; TODO confirm schema.
        .map(line => (line.split("\t")(0), 1))
        .reduceByKey(_ + _)
        .sortBy(_._2, ascending = false)

      // take(7) pulls the top rows to the driver; re-parallelize so the
      // small result can still be written with saveAsTextFile.
      sc.parallelize(dataRdd.take(7)).saveAsTextFile(outputPath)
    } finally {
      sc.stop() // release the SparkContext even if the job fails
    }
  }
}
