package org.zjt.spark.book
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Reads a whitespace-separated weather file, groups readings per "year-month"
 * key, sorts each group's values ascending, and prints the result.
 *
 * Input line format (as consumed below): `yyyy,MM...<4 spaces>value` —
 * the head segment before the 4-space separator holds comma-separated date
 * parts; the last segment is the reading. NOTE(review): format assumed from
 * the parsing code — confirm against the actual weather.txt.
 */
object WeatherSort {

  def main(args: Array[String]): Unit = {
    // val, not var: the conf is never reassigned.
    val sparkConf = new SparkConf().setMaster("local[2]").setAppName("WeatherSort")
    val sc = new SparkContext(sparkConf)

    val rdd = sc.textFile("D:\\Idea workspace\\scala-demo\\src\\main\\resource\\weather.txt").map {
      line => {
        // Split once and reuse — the original re-split the line four times.
        val fields = line.split("    ")
        val value: String = fields.last
        val dateParts = fields.head.split(",")
        // Key keeps the original trailing tab (preserved for output compatibility).
        val key: String = dateParts(0) + "-" + dateParts(1) + "\t"
        (key, value)
      }
      // reduceByKey already yields one record per key, so distinct() is a
      // no-op here; kept to preserve the original pipeline exactly.
    }.coalesce(2).reduceByKey(_ + "," + _).map(a => (a._1, SortHandler.sort(a._2))).distinct()

    println(rdd.collect().mkString("\n"))

    sc.stop()
  }
}

/** Utility for sorting a comma-separated list of numeric readings. */
object SortHandler {

  /**
   * Parses a comma-separated string of doubles, sorts them ascending, and
   * re-joins them with `" , "`.
   *
   * Replaces the original hand-rolled O(n^2) exchange sort with the standard
   * library's `sorted` — identical ordering and output format.
   *
   * @param values comma-separated doubles, e.g. "2.3,2.1,1.5"
   * @return the same numbers sorted ascending, joined with " , ";
   *         empty input yields "" (the original threw NumberFormatException)
   */
  def sort(values: String): String = {
    if (values.isEmpty) ""
    else values.split(",").map(_.toDouble).sorted.mkString(" , ")
  }

  def main(args: Array[String]): Unit = {
    println(sort("2.3,2.1,1.5,8.1"))
  }

}
