package rdd

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Spark example: reads a CSV-like text file, groups rows by a composite key
 * built from the first two fields, sorts each group's values numerically,
 * and prints the result ordered by key.
 *
 * Expected input format per line: `field0,field1,field2`
 * NOTE(review): assumes every line has at least 3 comma-separated fields and
 * that field 3 parses as an Int — malformed lines will throw at runtime.
 * Confirm against data/data.txt.
 */
object RDD_DemoExample03 {
  def main(args: Array[String]): Unit = {
    // setMaster/setAppName return `this`, so chain them fluently.
    val conf = new SparkConf()
      .setMaster("local")
      .setAppName("RDD_DemoExample03")

    val sc: SparkContext = new SparkContext(conf)

    val sortRDD: RDD[(String, String)] = sc
      .textFile("data/data.txt")
      .map { line =>
        val fields = line.split(",")
        // Composite key: first two fields joined by "-"; value: third field.
        (fields(0) + "-" + fields(1), fields(2))
      }
      .groupByKey()
      .mapValues { values =>
        // Sort values numerically (not lexicographically), then re-join.
        // sortBy(_.toInt) converts each element once, vs. sortWith which
        // re-parses on every comparison.
        values.toList.sortBy(_.toInt).mkString(",")
      }
      .sortByKey() // order output by the composite key

    sortRDD
      .collect()
      .foreach { case (key, values) =>
        println(s"$key\t$values")
      }

    sc.stop()
  }
}
