package com.lvmama.rhino.common.utils.sharedvariable

import java.util.{Map => JMap}
import java.util.Collections
import java.util.concurrent.ConcurrentHashMap
import collection.JavaConversions._

import org.apache.spark.util.AccumulatorV2

/**
  * Created by yuanxiaofeng on 2017/3/21.
  */
/**
  * Spark accumulator that assigns and tracks session ids per key.
  *
  * Each entry maps a session key (e.g. a user/cookie id) to
  * `(sessionId, lastSeenTimestampMillis)`. A session expires after 30 minutes
  * of inactivity; `addWithResult` reuses the existing id inside that window
  * and mints a fresh one otherwise.
  */
class SessionAccumulator extends AccumulatorV2[(String, Long), JMap[String, (String, Long)]]{
  // Session idle timeout: 30 minutes, in milliseconds.
  private val SessionTimeoutMs: Long = 30L * 60 * 1000

  // key -> (sessionId, lastSeenTimestampMillis)
  private val _map = new ConcurrentHashMap[String, (String, Long)]()
  // Number of live sessions (entries ever added minus entries recycled).
  private var _count = 0L

  override def isZero: Boolean = _map.isEmpty && _count == 0

  override def merge(other: AccumulatorV2[(String, Long), JMap[String, (String, Long)]]): Unit =
    other match {
      case o: SessionAccumulator =>
        // Use the matched instance's map directly instead of `other.value`,
        // which would build a needless defensive copy.
        _map.putAll(o._map)
        // BUG FIX: the session count was previously dropped on merge, leaving
        // _count (and hence isZero) inconsistent after task-result merging.
        // NOTE(review): keys present in both accumulators are counted twice;
        // acceptable as an upper bound — confirm against downstream usage.
        _count += o._count
      case _ =>
        throw new UnsupportedOperationException(
          s"Cannot merge ${this.getClass.getName} with ${other.getClass.getName}")
  }

  override def copy(): AccumulatorV2[(String, Long), JMap[String, (String, Long)]] = {
    val newAcc = new SessionAccumulator
    _map.synchronized {
      newAcc._map.putAll(_map)
      // BUG FIX: _count was not carried over, so copies started at 0
      // while holding a non-empty map.
      newAcc._count = _count
    }
    newAcc
  }

  /** Returns an unmodifiable snapshot of the current session table. */
  override def value: JMap[String, (String, Long)] =
    // ConcurrentHashMap's copy constructor iterates safely under concurrent
    // writes; the previous `synchronized` added no protection because the
    // writers (put/remove) never synchronized on the same monitor.
    Collections.unmodifiableMap(new ConcurrentHashMap[String, (String, Long)](_map))

  override def reset(): Unit = {
    _map.clear()
    // BUG FIX: _count must be zeroed together with the map, otherwise
    // isZero stays false after reset().
    _count = 0
  }

  /** Removes every session idle for longer than the timeout. */
  def recycle(): Unit = {
    val now = System.currentTimeMillis()
    // Plain Java iterator: avoids the deprecated JavaConversions implicits
    // and allows safe removal while iterating a ConcurrentHashMap.
    val it = _map.entrySet().iterator()
    while (it.hasNext) {
      val entry = it.next()
      // BUG FIX: the comparison was inverted (storedTime - now), which is
      // negative for any past timestamp, so no session was ever recycled.
      if (now - entry.getValue._2 > SessionTimeoutMs) {
        it.remove()
        _count -= 1
      }
    }
  }

  @deprecated("use addWithResult to obtain the session id", "2017-03-21")
  override def add(v: (String, Long)): Unit = {
    // Unconditionally overwrites; does not maintain _count (legacy behavior).
    _map.put(v._1, (uuid, v._2))
  }

  /**
    * Records an observation `(key, timestampMillis)` and returns the session
    * id: the existing one if the key was seen within the timeout window,
    * otherwise a freshly generated id.
    */
  def addWithResult(v: (String, Long)): String = {
    // BUG FIX: lookups previously used the whole tuple `v` as the key while
    // the map is keyed by String, so containsKey never matched and every
    // call minted a new uuid and inflated _count.
    if (_map.containsKey(v._1)) {
      val (existingId, lastSeen) = _map.get(v._1)
      if (v._2 - lastSeen <= SessionTimeoutMs) {
        existingId
      } else {
        // Same key, new session: replace id; _count unchanged (still one
        // live session for this key).
        val id = uuid
        _map.put(v._1, (id, v._2))
        id
      }
    } else {
      val id = uuid
      _map.put(v._1, (id, v._2))
      _count += 1
      id
    }
  }

  /** Random 32-character hex session id (UUID with dashes stripped). */
  def uuid: String = java.util.UUID.randomUUID.toString.replace("-", "")
}
