package cn.doitedu.spark

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, DataOutputStream}

import org.apache.commons.codec.binary.Base64
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SparkSession
import org.roaringbitmap.RoaringBitmap
import org.roaringbitmap.RoaringBitmapWriter.RoaringBitmapWizard

import scala.collection.mutable

/**
 * @author 涛哥
 * @nick_name "deep as the sea"
 * @contact qq:657270652 wx:doit_edu
 * @site www.doitedu.cn
 * @date 2021-01-16
 * @desc Basic bitmap test: RoaringBitmap serialization / re-aggregation in Spark SQL
 */
object BitMapTest {

  /**
   * Serializes a RoaringBitmap into a Base64-encoded string so it can travel
   * through a Spark SQL string column between aggregation levels.
   */
  private def serializeToBase64(bitmap: RoaringBitmap): String = {
    val bout = new ByteArrayOutputStream()
    val dout = new DataOutputStream(bout)
    bitmap.serialize(dout)
    dout.flush() // ensure everything is pushed into the byte buffer before encoding
    Base64.encodeBase64String(bout.toByteArray)
  }

  /**
   * Deserializes a Base64-encoded string (produced by [[serializeToBase64]])
   * back into a RoaringBitmap.
   */
  private def deserializeFromBase64(encoded: String): RoaringBitmap = {
    val bitmap = RoaringBitmap.bitmapOf()
    val din = new DataInputStream(new ByteArrayInputStream(Base64.decodeBase64(encoded)))
    bitmap.deserialize(din)
    bitmap
  }

  def main(args: Array[String]): Unit = {

    Logger.getLogger("org").setLevel(Level.WARN)

    val spark = SparkSession.builder()
      .appName("BitMapTest") // was "": give the job a recognizable name in the Spark UI
      .master("local")
      .getOrCreate()

    val df = spark.read.option("header", "true").csv("spark_teach/data/bitmap/input/a.csv")

    // UDF: pack a group's guid set into a RoaringBitmap, returned as Base64 text.
    val toBitmap = (ids: mutable.WrappedArray[String]) => {
      val arr = ids.map(_.toInt).toArray
      serializeToBase64(RoaringBitmap.bitmapOf(arr: _*))
    }
    spark.udf.register("torbm", toBitmap)

    df.createTempView("df")
    // Finest grouping level: one serialized bitmap per (province, city, region).
    val tmp = spark.sql(
      """
        |
        |select
        |
        |province,
        |city,
        |region,
        |torbm(collect_set(guid)) as ids
        |
        |from df
        |group by province,city,region
        |
        |
        |""".stripMargin)

    tmp.show()
    tmp.createTempView("tmp")

    // UDF: OR together pre-built bitmaps and return the merged cardinality —
    // an exact distinct-guid count at a coarser grouping level, without
    // re-reading the raw guids.
    val reagg = (arr: mutable.WrappedArray[String]) => {
      // Fold over an empty bitmap: no special case for the first element, and
      // an empty input safely yields cardinality 0 (the original indexed
      // ids(0) and would have thrown on an empty array).
      val merged = arr.foldLeft(RoaringBitmap.bitmapOf()) { (acc, encoded) =>
        acc.or(deserializeFromBase64(encoded)) // in-place union into the accumulator
        acc
      }
      merged.getCardinality
    }
    spark.udf.register("reagg", reagg)

    // Roll region-level bitmaps up to (province, city).
    val tmp2 = spark.sql(
      """
        |
        |select
        |province,city,
        |reagg(collect_set(ids)) as cnt
        |
        |from tmp
        |
        |group by province,city
        |""".stripMargin)

    tmp2.show()
    tmp2.createTempView("tmp2")

    // Roll region-level bitmaps up to province.
    val tmp3 = spark.sql(
      """
        |
        |select
        |province,
        |reagg(collect_set(ids)) as cnt
        |
        |from tmp
        |
        |group by province
        |""".stripMargin)

    tmp3.show()

    spark.close()
  }

}
