package com.txl.cn.spark02

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by txl on 2017/12/27.
  */
/**
  * Joins two whitespace-delimited text files by their first token (the key).
  *
  * Sample primary line (args(0)):  "u5 22 xa2"
  * Sample output when no secondary match exists: "u5 22 xa2 null null null"
  *
  * For every key, the first value from the primary file is emitted, followed by
  * all remaining values (sorted lexicographically by their first token) joined
  * with spaces, or the literal "null null null" when there are none.
  */
object YearDemo {
  /**
    * Parses lines of the form "key rest-of-line" into (key, rest) pairs.
    * Lines without a space are malformed for this format and are skipped,
    * so a stray blank/short line cannot crash the job with an
    * ArrayIndexOutOfBoundsException.
    */
  private def toKeyValue(lines: RDD[String]): RDD[(String, String)] =
    lines.flatMap { line =>
      val parts = line.split(" ", 2) // limit 2: keep the remainder intact
      if (parts.length == 2) Some((parts(0), parts(1))) else None
    }

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("YearDemo").setMaster("local[*]")
    val sc = new SparkContext(conf)
    try {
      val primary: RDD[(String, String)] = toKeyValue(sc.textFile(args(0)))
      val secondary: RDD[(String, String)] = toKeyValue(sc.textFile(args(1)))

      // NOTE: the previous union + groupByKey approach assumed the head of the
      // grouped iterable was the primary-file value, but groupByKey gives NO
      // ordering guarantee. cogroup keeps the two sides separate, so picking
      // the primary record is deterministic.
      val res: RDD[(String, String)] = primary.cogroup(secondary).mapValues {
        case (pVals, sVals) =>
          // Base record: first primary value; fall back to a secondary value
          // for keys absent from the primary file (mirrors the old union
          // behaviour, but deterministically).
          val base: String = pVals.headOption.getOrElse(sVals.head)
          val rest: Iterable[String] =
            if (pVals.isEmpty) sVals.tail else pVals.tail ++ sVals

          val suffix: String =
            if (rest.isEmpty) "null null null"
            else rest.toList.sortBy(_.split(" ")(0)).mkString(" ")

          base + " " + suffix
      }

      res.foreach(println)
    } finally {
      sc.stop() // always release the SparkContext, even on failure
    }
  }
}
