package com.etc


import org.apache.spark.{SparkConf, SparkContext}

object WorldCount {
  /** Word count skeleton: reads a local text file, splits each line on spaces
    * and prints every (word, 1) pair.
    *
    * NOTE: counts are NOT aggregated here — chain .reduceByKey(_ + _) after
    * the map to get totals per word. (The original file carried that step as
    * commented-out code.)
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("WorldCount").setMaster("local")
    // conf.set("spark.testing.memory", "2147480000") // any value above 512m works
    val sc = new SparkContext(conf)
    try {
      sc.textFile("E:\\b.txt")
        .flatMap(_.split(" "))
        .map(word => (word, 1))
        .foreach(println)
    } finally {
      // Always release the SparkContext, even if the job above throws.
      sc.stop()
    }
  }
}

// Sort example: keys each input line by a custom ScalaSort built from its
// first two space-separated integer fields, sorts by that key and prints the
// original lines in sorted order.
// (ScalaSort is declared elsewhere in the project — presumably an Ordered
// key over the two ints; confirm against its definition.)
object sort {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("sort").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      val lines = sc.textFile("E:\\sort.txt", 1)

      val keyed = lines.map { line =>
        // Split once per record instead of twice, as the original did.
        val fields = line.split(" ")
        (ScalaSort(fields(0).toInt, fields(1).toInt), line)
      }

      keyed.sortByKey().map(_._2).foreach(println)
    } finally {
      // The original never stopped the context; release it here.
      sc.stop()
    }
  }
}

// Word count followed by a sort on the counts, using the custom sortTest
// ordering defined below (larger counts first).
object sortTest {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("sortTest").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      sc.textFile("E:\\a.txt")
        .flatMap(_.split(" "))
        .map(word => (word, 1))
        .reduceByKey(_ + _)
        // Key each (word, count) pair by its count. The count is already an
        // Int, so the original's redundant .toInt is dropped.
        .map(pair => (sortTest(pair._2), pair))
        .sortByKey()
        .map(_._2)
        .foreach(println)
    } finally {
      // Always release the SparkContext, even if the job above throws.
      sc.stop()
    }
  }
}
// Custom sort key: larger `first` sorts earlier (descending order).
// Implemented with Integer.compare instead of subtraction, because
// `that.first - this.first` can overflow Int (e.g. when the operands have
// opposite signs near Int.MaxValue/MinValue) and silently invert the
// comparison. The original if/else also collapsed to this single expression.
case class sortTest(first: Int) extends Ordered[sortTest] with Serializable {
  override def compare(that: sortTest): Int = Integer.compare(that.first, this.first)
}

// Print the 3 largest numbers in the input file (one number per line).
//
// Bug fix: the original chained .sortByKey().map(_._2).top(3), but top(3)
// uses the natural *String* ordering of the mapped lines — lexicographic,
// so "9" beats "10" — and ignores the custom sortTest key entirely.
// sortByKey() with the sortTest key already yields descending numeric
// order, so take(3) returns exactly the three largest values.
object top {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("top").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      sc.textFile("E:\\input\\top.txt")
        .map(line => (sortTest(line.toInt), line))
        .sortByKey()
        .map(_._2)
        .take(3)
        .foreach(println)
    } finally {
      // Always release the SparkContext, even if the job above throws.
      sc.stop()
    }
  }
}

// For each class (first field of a line), print the top 3 scores
// (second field). Input lines look like "class score".
//
// Bug fix: the original sorted the score strings with sortWith(_ > _),
// which is lexicographic — "9" > "10" — and therefore wrong for
// multi-digit scores; compare numerically via .toInt instead.
object topN {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("topN").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      sc.textFile("E:\\input\\score.txt")
        .map { line =>
          // Split once per record instead of twice, as the original did.
          val fields = line.split(" ")
          (fields(0), fields(1))
        }
        .groupByKey()
        .map { case (clazz, scores) =>
          (clazz, scores.toBuffer.sortWith(_.toInt > _.toInt).take(3))
        }
        .foreach { case (clazz, topScores) =>
          println(clazz)
          topScores.foreach(println)
        }
    } finally {
      // Always release the SparkContext, even if the job above throws.
      sc.stop()
    }
  }
}