package cn.doitedu.spark

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, DataOutputStream}

import org.apache.spark.sql.SparkSession
import org.roaringbitmap.RoaringBitmap

import scala.collection.mutable

/**
 * Demo: exact distinct counts via pre-aggregated RoaringBitmaps.
 *
 * Stage 1 (commented out below, one-off): collapse raw (id, name, p, c) events
 * into one serialized bitmap of user ids per (p, c) group, stored in `test.bm`.
 * Stage 2: answer "distinct users per p" by OR-merging the per-(p, c) bitmaps
 * and reading the cardinality — no re-scan of the raw data needed.
 */
object AggregateDemo {

  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder()
      .master("local[*]")
      .config("spark.sql.shuffle.partitions", "10")
      .appName("AggregateDemo") // was "": an empty app name makes the job hard to find in the UI/history server
      .enableHiveSupport()
      .getOrCreate()
    import spark.implicits._
    import org.apache.spark.sql.functions._

    // UDF "bitmap": Array[Int] -> serialized RoaringBitmap (Array[Byte]).
    // Spark hands array<int> columns to Scala UDFs as mutable.WrappedArray.
    // ByteArrayOutputStream needs no close(); DataOutputStream is unbuffered,
    // so toByteArray is complete as soon as serialize() returns.
    val bitmap: mutable.WrappedArray[Int] => Array[Byte] = (ids: mutable.WrappedArray[Int]) => {
      val bmp = RoaringBitmap.bitmapOf(ids.toArray: _*)
      val bytes = new ByteArrayOutputStream()
      bmp.serialize(new DataOutputStream(bytes))
      bytes.toByteArray
    }

    // UDF "bmcard": serialized RoaringBitmap -> its cardinality (exact distinct count).
    // (Original wrapped the input in a redundant `.toArray` — it is already Array[Byte].)
    val bmcard = udf((bytes: Array[Byte]) => {
      val bm = new RoaringBitmap()
      bm.deserialize(new DataInputStream(new ByteArrayInputStream(bytes)))
      bm.getCardinality
    })

    spark.udf.register("bitmap", bitmap)
    spark.udf.register("bmcard", bmcard)

    /* One-off stage 1: build the pre-aggregated table test.bm from raw events.
    val ds = spark.createDataset(Seq(
      "1,a,p1,c1",
      "2,b,p1,c1",
      "3,c,p1,c2",
      "3,c,p2,c1",
      "2,b,p2,c1",
      "4,d,p1,c1",
      "5,e,p3,c3",
      "5,e,p3,c1"
    ))

    val data = ds.map(s => {
      val arr = s.split(",")
      (arr(0).toInt, arr(1), arr(2), arr(3))
    }).toDF("id", "name", "p", "c")

    data.createTempView("data")
    spark.sql(
      """
        |insert into table test.bm
        |select
        |  p, c, bitmap(collect_set(id)) as ids
        |from data
        |group by p, c
        |""".stripMargin).show(100, false) */

    // Sanity check of the stored bitmaps:
    // spark.read.table("test.bm").select('p, 'c, bmcard('ids)).show()

    // Bmxor (defined elsewhere in this project) merges serialized bitmaps; it is
    // registered under the SQL name "bmor".
    // NOTE(review): the object name says XOR but a distinct count per p requires
    // OR (union) semantics — confirm Bmxor actually performs a union.
    spark.udf.register("bmor", Bmxor)

    // Distinct users per p: union the per-(p, c) bitmaps, then take cardinality.
    spark.sql(
      """
        |select
        |  p, bmcard(bmor(ids)) as cnt
        |from test.bm
        |group by p
        |""".stripMargin).show()

    spark.close()
  }

}
