package cn.doitedu.datayi.utils

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, DataOutputStream}

import org.apache.spark.sql.{Encoder, Encoders}
import org.apache.spark.sql.expressions.Aggregator
import org.roaringbitmap.RoaringBitmap


object BitmapUDAF extends Aggregator[Int, Array[Byte], Array[Byte]] {

  /**
   * Initializes the aggregation buffer: an empty RoaringBitmap serialized
   * into a byte array.
   *
   * NOTE(review): the buffer is kept as Array[Byte] (see bufferEncoder) so it
   * can be encoded with Encoders.BINARY; the cost is a full
   * deserialize/serialize round-trip per input row in reduce/merge.
   *
   * @return serialized form of an empty bitmap
   */
  override def zero: Array[Byte] = serBitmap(new RoaringBitmap())

  /**
   * Folds one input value into the buffer: deserialize the buffered bitmap,
   * add the guid, and serialize it back.
   *
   * @param b    serialized buffer bitmap
   * @param guid one element of the aggregated column
   * @return serialized bitmap containing `guid`
   */
  override def reduce(b: Array[Byte], guid: Int): Array[Byte] = {
    val bitmap = desBitmap(b)
    bitmap.add(guid)
    serBitmap(bitmap)
  }

  /**
   * Merges two partial buffers by OR-ing their bitmaps.
   *
   * @param b1 serialized bitmap from one partition
   * @param b2 serialized bitmap from another partition
   * @return serialized union of both bitmaps
   */
  override def merge(b1: Array[Byte], b2: Array[Byte]): Array[Byte] = {
    val bm1 = desBitmap(b1)
    val bm2 = desBitmap(b2)
    // `or` mutates bm1 in place; bm2 is left untouched.
    bm1.or(bm2)
    serBitmap(bm1)
  }

  /**
   * Final result: the buffer already holds the serialized bitmap,
   * so it is returned as-is (callers deserialize it downstream).
   */
  override def finish(reduction: Array[Byte]): Array[Byte] = reduction

  /** Buffer is a plain byte array, encoded as Spark BINARY. */
  override def bufferEncoder: Encoder[Array[Byte]] = Encoders.BINARY

  /** Output is the serialized bitmap, encoded as Spark BINARY. */
  override def outputEncoder: Encoder[Array[Byte]] = Encoders.BINARY

  /**
   * Serializes a bitmap into the portable RoaringBitmap byte format.
   *
   * @param bitmap bitmap to serialize
   * @return serialized bytes
   */
  def serBitmap(bitmap: RoaringBitmap): Array[Byte] = {
    // Pre-size the stream with the exact serialized size so the backing
    // array is allocated once instead of growing/copying per write —
    // this runs once per input row, so the saving is not negligible.
    val baOut = new ByteArrayOutputStream(bitmap.serializedSizeInBytes())
    val dOut = new DataOutputStream(baOut)
    bitmap.serialize(dOut)
    baOut.toByteArray
  }

  /**
   * Deserializes a bitmap from the RoaringBitmap byte format.
   *
   * @param b bytes previously produced by [[serBitmap]]; malformed input
   *          will surface as an IOException/runtime error from RoaringBitmap
   * @return the reconstructed bitmap
   */
  def desBitmap(b: Array[Byte]): RoaringBitmap = {
    val bitmap = new RoaringBitmap()
    val dataIn = new DataInputStream(new ByteArrayInputStream(b))
    bitmap.deserialize(dataIn)
    bitmap
  }

}
