package util

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.DataFrame

import scala.collection.mutable.ArrayBuffer
import scala.reflect.ClassTag

/**
  * @author Alex
  */
object RandomUtils {

  /**
    * Randomly splits an RDD into exactly two parts whose sizes match the
    * normalized weights. Spark's `randomSplit` only approximates the requested
    * proportions, so after the initial split, rows are moved between the two
    * parts until the first part contains exactly
    * `(weights(0) / weights.sum * total).toLong` elements.
    *
    * NOTE(review): `zipWithIndex` assigns indices per the current lineage
    * evaluation; for a non-deterministic source RDD the caller should cache
    * `datas` first so the two filter passes see consistent indices — confirm
    * against call sites.
    *
    * @param datas   the RDD to split
    * @param weights exactly two relative weights; normalized internally
    * @param seed    seed forwarded to `randomSplit`
    * @return an array of exactly two RDDs that together cover all input rows
    * @throws IllegalArgumentException if `weights` does not have length 2
    */
  def randomSplitTwoParts[T: ClassTag](datas: RDD[T], weights: Array[Double], seed: Long): Array[RDD[T]] = {
    if (weights.length != 2) {
      throw new IllegalArgumentException("only split two parts")
    }

    val firstSplit = datas.randomSplit(weights, seed)
    // Exact number of rows the first part must contain after normalization.
    val total = datas.count()
    val firstTarget = (weights(0) / weights.sum * total).toLong
    // Only the first part's size is needed; avoid a count action on the second.
    val firstCount = firstSplit(0).count()

    if (firstCount == firstTarget) {
      firstSplit
    } else if (firstCount > firstTarget) {
      // First part too large: keep its first `firstTarget` rows and move the
      // overflow into the second part. The two filters partition `indexed`
      // without overlap.
      val indexed = firstSplit(0).zipWithIndex
      val kept = indexed.filter(_._2 < firstTarget).map(_._1)
      val overflow = indexed.filter(_._2 >= firstTarget).map(_._1)
      Array(kept, overflow ++ firstSplit(1))
    } else {
      // First part too small: borrow the deficit from the front of the second.
      val deficit = firstTarget - firstCount
      val indexed = firstSplit(1).zipWithIndex
      val borrowed = indexed.filter(_._2 < deficit).map(_._1)
      val remainder = indexed.filter(_._2 >= deficit).map(_._1)
      Array(firstSplit(0) ++ borrowed, remainder)
    }
  }

  /**
    * DataFrame variant of [[randomSplitTwoParts]]: performs the same
    * exact-size adjustment on the underlying row RDDs and re-wraps each part
    * with the original schema.
    *
    * @param datas   the DataFrame to split
    * @param weights exactly two relative weights; normalized internally
    * @param seed    seed forwarded to `randomSplit`
    * @return an array of exactly two DataFrames that together cover all rows
    * @throws IllegalArgumentException if `weights` does not have length 2
    * @note the type parameter `T` is never used by this overload; it is kept
    *       only for source compatibility with existing callers.
    */
  def randomSplitTwoParts[T: ClassTag](datas: DataFrame, weights: Array[Double], seed: Long): Array[DataFrame] = {
    if (weights.length != 2) {
      throw new IllegalArgumentException("only split two parts")
    }

    val sparkSession = datas.sparkSession
    val schema = datas.schema
    val firstSplit = datas.randomSplit(weights, seed)
    // Exact number of rows the first part must contain after normalization.
    val total = datas.count()
    val firstTarget = (weights(0) / weights.sum * total).toLong
    val firstCount = firstSplit(0).count()

    if (firstCount == firstTarget) {
      firstSplit
    } else if (firstCount > firstTarget) {
      // First part too large: move the overflow rows into the second part.
      val indexed = firstSplit(0).rdd.zipWithIndex
      val kept = indexed.filter(_._2 < firstTarget).map(_._1)
      val overflow = indexed.filter(_._2 >= firstTarget).map(_._1)
      Array(
        sparkSession.createDataFrame(kept, schema),
        sparkSession.createDataFrame(overflow ++ firstSplit(1).rdd, schema)
      )
    } else {
      // First part too small: borrow the deficit from the front of the second.
      val deficit = firstTarget - firstCount
      val indexed = firstSplit(1).rdd.zipWithIndex
      val borrowed = indexed.filter(_._2 < deficit).map(_._1)
      val remainder = indexed.filter(_._2 >= deficit).map(_._1)
      Array(
        sparkSession.createDataFrame(firstSplit(0).rdd ++ borrowed, schema),
        sparkSession.createDataFrame(remainder, schema)
      )
    }
  }
}
