package cn.lecosa.spark
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD.rddToPairRDDFunctions
import org.apache.spark.HashPartitioner

object Max {

  /** Reads a text file of integers (one per line), computes the global
    * maximum and minimum in a single distributed pass, and prints the
    * result as a (max, min) tuple.
    *
    * The input path defaults to the original hard-coded location but can
    * be overridden by the first command-line argument.
    */
  def main(args: Array[String]): Unit = {
    // Configure Spark: the app name is what appears in the monitoring UI;
    // local[2] runs with two threads on this machine, so no cluster is needed.
    val conf = new SparkConf()
      .setAppName("wow,My First Spark App!")
      .setMaster("local[2]")

    val sc = new SparkContext(conf)

    try {
      val path = if (args.nonEmpty) args(0) else "F:/spark/workspace/SparkDemo01/max"
      val lines = sc.textFile(path, 3)

      // Keep non-blank lines and seed each number as its own (max, min)
      // candidate under a single shared key, so one reduce yields the
      // global extremes.
      val candidates = lines.filter(_.trim.nonEmpty).map { line =>
        val n = line.trim.toInt
        ("key", (n, n))
      }

      // reduceByKey combines partial results map-side before the shuffle;
      // the original groupByKey shipped every value to one task and risked
      // OOM on large inputs.
      val result = candidates
        .reduceByKey { case ((max1, min1), (max2, min2)) =>
          (math.max(max1, max2), math.min(min1, min2))
        }
        .map(_._2) // drop the synthetic key, keep the (max, min) tuple

      result.foreach(println)
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }
}