import org.apache.spark.{SparkConf, SparkContext}

/**
 * Spark driver that reads integers (one per line) from a text file and
 * prints the maximum and minimum value.
 *
 * Usage: MaxAndMin [inputPath]   (defaults to "src/maxmin")
 *
 * Implementation note: the original version mapped every value onto a single
 * synthetic key and called groupByKey, which shuffles the whole dataset to one
 * reducer and materializes every value in memory. A single distributed
 * `reduce` over (max, min) pairs computes the same result in one pass with
 * per-partition combining and no single-reducer hot spot.
 */
object MaxAndMin {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("MaxMinValue").setMaster("local")
    conf.set("spark.testing.memory", "2147480000")
    val sc = new SparkContext(conf)
    try {
      sc.setLogLevel("ERROR")
      // Allow the input path to be overridden from the command line;
      // fall back to the original hard-coded location.
      val inputPath = if (args.nonEmpty) args(0) else "src/maxmin"
      val values = sc.textFile(inputPath, 2)
        .map(_.trim)
        .filter(_.nonEmpty) // skip blank lines before parsing
        .map(_.toInt)

      // Guard: `reduce` throws on an empty RDD. The original printed nothing
      // for empty input (no groups), so we preserve that behavior.
      if (!values.isEmpty()) {
        // Each element becomes a (max, min) candidate pair; pairs are
        // combined associatively within and across partitions.
        val (max, min) = values
          .map(v => (v, v))
          .reduce { case ((max1, min1), (max2, min2)) =>
            (math.max(max1, max2), math.min(min1, min2))
          }
        println("Max: " + max)
        println("Min: " + min)
      }
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }
}
