package doit20.datayi.etl

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, DataOutputStream}

import org.apache.spark.sql.{Encoder, Encoders}
import org.apache.spark.sql.expressions.Aggregator
import org.roaringbitmap.RoaringBitmap

/**
 * @author 涛哥
 * @nick_name "deep as the sea"
 * @contact qq:657270652 wx:doit_edu
 * @site www.doitedu.cn
 * @date 2021-04-18
 * @desc Bitmap aggregate function. The three type parameters are: the input type,
 *       the intermediate buffer type, and the final return type.
 */
object BitMapAggrCard extends Aggregator[Array[Byte], Array[Byte], Int] {

  /** Initial buffer value: a serialized empty RoaringBitmap. */
  override def zero: Array[Byte] = ser(RoaringBitmap.bitmapOf())

  /** Partial (per-partition) aggregation: OR the incoming bitmap into the buffer. */
  override def reduce(buf: Array[Byte], in: Array[Byte]): Array[Byte] = union(buf, in)

  /** Global aggregation: OR two partially-aggregated buffers together. */
  override def merge(buff1: Array[Byte], buff2: Array[Byte]): Array[Byte] = union(buff1, buff2)

  /** Final result: the cardinality (distinct-value count) of the fully merged bitmap. */
  override def finish(mergedBuff: Array[Byte]): Int = der(mergedBuff).getCardinality

  /** Buffer travels through Spark as raw bytes. */
  override def bufferEncoder: Encoder[Array[Byte]] = Encoders.BINARY

  /** Output is a plain Scala Int. */
  override def outputEncoder: Encoder[Int] = Encoders.scalaInt

  /**
   * Deserialize both operands, compute their bitwise OR (set union),
   * and serialize the result back to bytes. Shared by reduce and merge.
   */
  private def union(left: Array[Byte], right: Array[Byte]): Array[Byte] = {
    val target = der(left)
    target.or(der(right))
    ser(target)
  }

  /** Serialize a RoaringBitmap into a byte array (ByteArrayOutputStream needs no close). */
  def ser(bitmap: RoaringBitmap): Array[Byte] = {
    val byteStream = new ByteArrayOutputStream()
    bitmap.serialize(new DataOutputStream(byteStream))
    byteStream.toByteArray
  }

  /** Deserialize a byte array (produced by [[ser]]) back into a RoaringBitmap. */
  def der(bytes: Array[Byte]): RoaringBitmap = {
    val restored = RoaringBitmap.bitmapOf()
    restored.deserialize(new DataInputStream(new ByteArrayInputStream(bytes)))
    restored
  }

}
