package util

import org.apache.commons.math3.distribution.PoissonDistribution
import org.apache.spark.rdd.RDD

import scala.collection.mutable.ArrayBuffer
import scala.collection.{Map, mutable}

object DataSampleUtils {
  private val log = org.slf4j.LoggerFactory.getLogger(this.getClass)

  /**
    * Splits `rdd` into two disjoint RDDs by stratified (per-key) Bernoulli sampling.
    *
    * The first RDD contains approximately `fractions(key)` of the items of each key
    * (exact-size stratified sampling via accept/waitlist thresholds); the second RDD
    * contains the complement. Both output RDDs re-derive the identical random sequence
    * from `seed`, so every input item lands in exactly one of the two outputs.
    *
    * Note: the name keeps the original (non-camelCase) spelling for source
    * compatibility with existing callers.
    *
    * @param rdd       input key-value RDD; every key must be present in `fractions`
    * @param fractions sampling fraction per key, each expected in [0, 1]
    * @param seed      RNG seed; each partition is re-seeded with `seed + partitionIndex`
    * @return (sampled RDD, remainder RDD)
    */
  def BernoulliSampling[K, V](rdd: RDD[(K, V)], fractions: Map[K, Double],
                              seed: Long = 0L): (RDD[(K, V)], RDD[(K, V)]) = {
    // Determine the acceptance threshold for each stratum, then resample.
    val finalResult = getAcceptanceResults(rdd, false, fractions, None, seed)
    if (log.isDebugEnabled) {
      finalResult.foreach { case (key, ar) =>
        log.debug(s"acceptance result: key=$key waitListSize=${ar.waitList.size}")
      }
    }

    val samplingRateByKey = computeThresholdByKey(finalResult, fractions)
    log.debug(s"samplingRateByKey=$samplingRateByKey")

    // The two passes differ only in which side of the threshold they keep.
    val originRdd = filterByThreshold(rdd, samplingRateByKey, seed, keepBelow = true)
    val otherRdd = filterByThreshold(rdd, samplingRateByKey, seed, keepBelow = false)
    (originRdd, otherRdd)
  }

  /**
    * Filters each partition by comparing one fresh uniform draw per item against the
    * per-key threshold. `keepBelow = true` keeps items whose draw is strictly below
    * the threshold; `keepBelow = false` keeps the complement. Because both calls
    * re-seed with `seed + partitionIndex` and draw exactly once per item, the two
    * halves partition the input exactly.
    */
  private def filterByThreshold[K, V](rdd: RDD[(K, V)], thresholds: Map[K, Double],
                                      seed: Long, keepBelow: Boolean): RDD[(K, V)] = {
    rdd.mapPartitionsWithIndex { case (idx, iterKV) =>
      val rng = new RandomDataGenerator()
      rng.reSeed(seed + idx)
      // Must use the same invoke pattern on the rng as in getSeqOp for without
      // replacement in order to generate the same sequence of random numbers
      // when creating the sample.
      if (keepBelow) {
        iterKV.filter(t => rng.nextUniform() < thresholds(t._1))
      } else {
        iterKV.filter(t => rng.nextUniform() >= thresholds(t._1))
      }
    }
  }

  /**
    * Count the number of items instantly accepted and generate the waitlist for each
    * stratum, by aggregating per-partition `AcceptanceResult`s and reducing them on
    * the driver.
    *
    * This is only invoked when exact sample size is required.
    *
    * @param rdd             input key-value RDD
    * @param withReplacement whether sampling is with replacement (Poisson) or
    *                        without (Bernoulli/binomial streaming bounds)
    * @param fractions       sampling fraction per key
    * @param counts          per-key item counts; required (`Some`) when
    *                        `withReplacement` is true, may be `None` otherwise
    * @param seed            base RNG seed; partition i uses `seed + i`
    */
  def getAcceptanceResults[K, V](rdd: RDD[(K, V)],
                                 withReplacement: Boolean,
                                 fractions: Map[K, Double],
                                 counts: Option[Map[K, Long]],
                                 seed: Long): mutable.Map[K, AcceptanceResult] = {
    val combOp = getCombOp[K]

    val mappedPartitionRDD = rdd.mapPartitionsWithIndex { case (partition, iter) =>
      val zeroU: mutable.Map[K, AcceptanceResult] = new mutable.HashMap[K, AcceptanceResult]()
      val rng = new RandomDataGenerator()
      rng.reSeed(seed + partition)
      val seqOp = getSeqOp(withReplacement, fractions, rng, counts)
      Iterator(iter.aggregate(zeroU)(seqOp, combOp))
    }
    mappedPartitionRDD.reduce(combOp)
  }

  /**
    * Returns the function used by aggregate to collect sampling statistics for each
    * partition.
    *
    * The RNG invocation pattern here is load-bearing: the resampling passes in
    * [[BernoulliSampling]] must replay exactly the same sequence of draws, so the
    * order and count of `rng` calls per item must not change.
    */
  def getSeqOp[K, V](withReplacement: Boolean,
                     fractions: Map[K, Double],
                     rng: RandomDataGenerator,
                     counts: Option[Map[K, Long]]):
  (mutable.Map[K, AcceptanceResult], (K, V)) => mutable.Map[K, AcceptanceResult] = {
    // Failure probability used by the streaming binomial bounds.
    val delta = 5e-5
    (result: mutable.Map[K, AcceptanceResult], item: (K, V)) => {
      val key = item._1
      val fraction = fractions(key)
      if (!result.contains(key)) {
        result += (key -> new AcceptanceResult())
      }
      val acceptResult = result(key)

      if (withReplacement) {
        // Compute acceptBound and waitListBound only if they haven't been computed
        // already, since they don't change from iteration to iteration.
        // TODO change this to the streaming version
        if (acceptResult.areBoundsEmpty) {
          // NOTE(review): `counts.get` throws if counts is None — callers must
          // supply counts whenever withReplacement is true.
          val n = counts.get(key)
          val sampleSize = math.ceil(n * fraction).toLong
          val lmbd1 = PoissonBounds.getLowerBound(sampleSize)
          val lmbd2 = PoissonBounds.getUpperBound(sampleSize)
          acceptResult.acceptBound = lmbd1 / n
          acceptResult.waitListBound = (lmbd2 - lmbd1) / n
        }
        val acceptBound = acceptResult.acceptBound
        val copiesAccepted = if (acceptBound == 0.0) 0L else rng.nextPoisson(acceptBound)
        if (copiesAccepted > 0) {
          acceptResult.numAccepted += copiesAccepted
        }
        val copiesWaitlisted = rng.nextPoisson(acceptResult.waitListBound)
        if (copiesWaitlisted > 0) {
          acceptResult.waitList ++= ArrayBuffer.fill(copiesWaitlisted)(rng.nextUniform())
        }
      } else {
        // We use the streaming version of the algorithm for sampling without
        // replacement to avoid using an extra pass over the RDD for computing the
        // count. Hence, acceptBound and waitListBound change on every iteration.
        acceptResult.acceptBound =
          BinomialBounds.getLowerBound(delta, acceptResult.numItems, fraction)
        acceptResult.waitListBound =
          BinomialBounds.getUpperBound(delta, acceptResult.numItems, fraction)

        val x = rng.nextUniform()
        if (x < acceptResult.acceptBound) {
          acceptResult.numAccepted += 1
        } else if (x < acceptResult.waitListBound) {
          acceptResult.waitList += x
        }
      }
      acceptResult.numItems += 1
      result
    }
  }

  /**
    * Returns the function used to combine results returned by seqOp from different
    * partitions. Mutates and returns `result2`, merging `result1` into it.
    */
  def getCombOp[K]: (mutable.Map[K, AcceptanceResult], mutable.Map[K, AcceptanceResult])
    => mutable.Map[K, AcceptanceResult] = {
    (result1: mutable.Map[K, AcceptanceResult], result2: mutable.Map[K, AcceptanceResult]) => {
      // Take union of both key sets in case one partition doesn't contain all keys.
      result1.keySet.union(result2.keySet).foreach { key =>
        // Use result2 to keep the combined result since result1 is usually empty.
        val entry1 = result1.get(key)
        if (result2.contains(key)) {
          result2(key).merge(entry1)
        } else if (entry1.isDefined) {
          result2 += (key -> entry1.get)
        }
      }
      result2
    }
  }

  /**
    * Given the result returned by [[getAcceptanceResults]], determine the threshold
    * for accepting items to generate exact sample size.
    *
    * To do so, we compute sampleSize = math.ceil(size * samplingRate) for each stratum
    * and compare it to the number of items that were accepted instantly and the number
    * of items in the waitlist for that stratum.
    *
    * Most of the time,
    * {{{
    * numAccepted <= sampleSize <= (numAccepted + numWaitlisted)
    * }}}
    * which means we need to sort the elements in the waitlist by their associated
    * values in order to find the value T s.t.
    * {{{
    * |{elements in the stratum whose associated values <= T}| = sampleSize
    * }}}.
    * Note that all elements in the waitlist have values greater than or equal to bound
    * for instant accept, so a T value in the waitlist range would allow all elements
    * that were instantly accepted on the first pass to be included in the sample.
    */
  def computeThresholdByKey[K](finalResult: Map[K, AcceptanceResult],
                               fractions: Map[K, Double]): Map[K, Double] = {
    val thresholdByKey = new mutable.HashMap[K, Double]()
    for ((key, acceptResult) <- finalResult) {
      val sampleSize = math.ceil(acceptResult.numItems * fractions(key)).toLong
      log.debug(s"key=$key sampleSize=$sampleSize")

      if (acceptResult.numAccepted > sampleSize) {
        log.warn(s"Pre-accepted too many for key $key")
        thresholdByKey += (key -> acceptResult.acceptBound)
      } else {
        val numWaitListAccepted = (sampleSize - acceptResult.numAccepted).toInt
        log.debug(s"key=$key numWaitListAccepted=$numWaitListAccepted " +
          s"waitListSize=${acceptResult.waitList.size}")
        if (numWaitListAccepted >= acceptResult.waitList.size) {
          log.warn(s"WaitList too short for key $key")
          thresholdByKey += (key -> acceptResult.waitListBound)
        } else {
          thresholdByKey += (key -> acceptResult.waitList.sorted.apply(numWaitListAccepted))
        }
      }
    }
    thresholdByKey
  }

  /** A random data generator that generates both uniform values and Poisson values. */
  private class RandomDataGenerator {
    val uniform = new XORShiftRandom()
    // commons-math3 doesn't have a method to generate Poisson from an arbitrary mean;
    // maintain a cache of Poisson(m) distributions for various m
    val poissonCache = mutable.Map[Double, PoissonDistribution]()
    var poissonSeed = 0L

    /** Re-seeds the uniform generator and resets the Poisson cache. */
    def reSeed(seed: Long): Unit = {
      uniform.setSeed(seed)
      poissonSeed = seed
      poissonCache.clear()
    }

    /** Draws one Poisson sample with the given mean, caching the distribution per mean. */
    def nextPoisson(mean: Double): Int = {
      val poisson = poissonCache.getOrElseUpdate(mean, {
        val newPoisson = new PoissonDistribution(mean)
        newPoisson.reseedRandomGenerator(poissonSeed)
        newPoisson
      })
      poisson.sample()
    }

    /** Draws one uniform sample in [0, 1). */
    def nextUniform(): Double = {
      uniform.nextDouble()
    }
  }

  /**
    * Per-stratum sampling statistics: total items seen, items instantly accepted,
    * the waitlist of borderline uniform draws, and the current accept/waitlist bounds.
    */
  class AcceptanceResult(var numItems: Long = 0L, var numAccepted: Long = 0L)
    extends Serializable {

    val waitList = new ArrayBuffer[Double]
    var acceptBound: Double = Double.NaN // upper bound for accepting item instantly
    var waitListBound: Double = Double.NaN // upper bound for adding item to waitlist

    /** True until both bounds have been computed (they start as NaN). */
    def areBoundsEmpty: Boolean = acceptBound.isNaN || waitListBound.isNaN

    /** Folds another partition's statistics for the same key into this one. */
    def merge(other: Option[AcceptanceResult]): Unit = {
      if (other.isDefined) {
        waitList ++= other.get.waitList
        numAccepted += other.get.numAccepted
        numItems += other.get.numItems
      }
    }
  }

}
