package com.spark.mooc.ch5_rdd.part04_cases

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * @description: Computes the maximum and minimum of the integers read
 *               (one per line) from the files under input/rdd/maxAndmin/.
 * @time: 2020/11/27 16:07
 * @author: lhy
 */
object MaxAndMin {
    /**
     * Entry point: reads integers (one per line) from the input files,
     * then prints the maximum and minimum values found.
     *
     * @param args command-line arguments (unused)
     */
    def main(args: Array[String]): Unit = {
        val conf: SparkConf = new SparkConf().setAppName("MaxAndMin").setMaster("local")
        val sc = new SparkContext(conf)
        sc.setLogLevel("ERROR") // suppress noisy INFO/WARN log output

        val lines: RDD[String] = sc.textFile("input/rdd/maxAndmin/*", 2)

        // Parse each non-blank line to an Int. Trim BEFORE toInt: the
        // original trimmed only in the filter, so a line like " 5 " passed
        // the filter but then threw NumberFormatException in the map.
        val numbers: RDD[Int] = lines
          .filter(line => line.trim.nonEmpty)
          .map(line => line.trim.toInt)

        if (numbers.isEmpty()) {
            // Guard: the original indexed results(0) on a possibly empty
            // array, which would throw ArrayIndexOutOfBoundsException.
            println("No numbers found in input.")
        } else {
            // max()/min() are simple reduces over the RDD — far cheaper than
            // the original sortByKey + collect, which shipped the entire
            // sorted dataset to the driver just to read its two endpoints.
            println("Max: " + numbers.max())
            println("Min: " + numbers.min())
        }

        sc.stop() // release the SparkContext and its resources
    }
}
