package com.zt.bigdata.spark.dataalgorithms.chapter01

import java.util
import java.util.function.BiFunction

/**
  * Merges two Java sorted maps by summing the values of keys present in
  * both (Data Algorithms, chapter 1 example).
  */
import scala.collection.JavaConverters._

object DataStructures {

  /**
    * Merges every entry of `small` into `large`, summing the values of keys
    * that appear in both maps. `large` is mutated in place and returned.
    *
    * @param small the map whose entries are folded into `large`; not modified
    * @param large the accumulator map; modified in place
    * @return `large`, after absorbing all entries of `small`
    */
  def merge(small: util.SortedMap[Int, Int],
            large: util.SortedMap[Int, Int]): util.SortedMap[Int, Int] = {
    small.asScala.foreach { case (key, value) =>
      // java.util.Map.merge only invokes the remapping function when the key
      // already has a non-null mapping, so `existing` can never be null here —
      // the original null-check on a primitive Int was dead code. If the key
      // is absent, merge simply inserts `value`.
      large.merge(key, value, new BiFunction[Int, Int, Int] {
        override def apply(existing: Int, incoming: Int): Int = existing + incoming
      })
    }
    large
  }

  /** Demo entry point: merges a 2-entry map into a 4-entry map and prints the result. */
  def main(args: Array[String]): Unit = {
    val large = new util.TreeMap[Int, Int]()
    large.put(1, 1)
    large.put(2, 2)
    large.put(3, 3)
    large.put(4, 4)

    val small = new util.TreeMap[Int, Int]()
    small.put(1, 1)
    small.put(2, 2)

    // Pass arguments in declaration order (small, large): `large` is the map
    // that gets mutated and returned. The original call swapped them; the
    // printed sums were identical (addition is commutative), but the wrong
    // map was being mutated.
    merge(small, large).asScala.foreach(kv => println(kv))
  }
}
