package cn.doitedu.spark

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, DataOutputStream}

import org.apache.hadoop.io.DataOutputOutputStream
import org.apache.spark.sql.SparkSession
import org.roaringbitmap.RoaringBitmap

import scala.collection.mutable

/**
 * Demo: build one RoaringBitmap of user ids per (p, c) group with a Spark SQL
 * UDF, serialize it to bytes, then read the cardinality back from the bytes.
 *
 * @author hunter.d
 * @date 2021-01-10
 * @contact qq657270652
 */
object SparkDemo {

  /**
   * Entry point. Builds a small id/name/p/c DataFrame, aggregates the ids of
   * each (p, c) group into a serialized RoaringBitmap via a SQL UDF, and
   * prints the resulting rows.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {

    // Local single-threaded session; appName given a meaningful value
    // (it was an empty string, which makes the job hard to find in the UI).
    val spark = SparkSession.builder()
      .master("local")
      .appName("SparkDemo")
      .getOrCreate()
    import spark.implicits._

    // Sample input lines in "id,name,p,c" CSV form.
    val ds = spark.createDataset(Seq(
      "1,a,p1,c1",
      "2,b,p1,c1",
      "3,c,p1,c2",
      "3,c,p2,c1",
      "2,b,p2,c1",
      "4,d,p1,c1",
      "5,e,p3,c3",
      "5,e,p3,c1"
    ))

    // Parse each CSV line into a typed row and name the columns.
    val data = ds.map { s =>
      val arr = s.split(",")
      (arr(0).toInt, arr(1), arr(2), arr(3))
    }.toDF("id", "name", "p", "c")

    // UDF body: collect a group's ids into a RoaringBitmap and serialize it
    // to a byte array (becomes a BinaryType column).
    // Declared as Seq[Int] rather than mutable.WrappedArray[Int]: Spark 2.x
    // passes array columns as WrappedArray but Spark 3.x passes ArraySeq;
    // Seq[Int] accepts both.
    val bitmap = (ids: Seq[Int]) => {
      val bmp = RoaringBitmap.bitmapOf(ids.toArray: _*)
      val bytes = new ByteArrayOutputStream()
      val out = new DataOutputStream(bytes)
      bmp.serialize(out)
      out.flush() // ensure everything reaches the underlying buffer
      bytes.toByteArray
    }

    import org.apache.spark.sql.functions._
    // UDF: deserialize a bitmap's bytes and return its cardinality.
    // BUGFIX: Spark hands BinaryType columns to Scala UDFs as Array[Byte],
    // NOT mutable.WrappedArray[Byte]; the original declaration failed with a
    // ClassCastException at runtime.
    val bmcard = udf((bytes: Array[Byte]) => {
      val bm = new RoaringBitmap()
      val in = new DataInputStream(new ByteArrayInputStream(bytes))
      bm.deserialize(in)
      bm.getCardinality
    })

    spark.udf.register("bitmap", bitmap)
    spark.udf.register("bmcard", bmcard)
    data.createTempView("data")

    // NOTE(review): the original called
    //   spark.read.table("test.bm").select('p, 'c, bmcard('ids)).show()
    // but no "test.bm" table exists in this self-contained demo, so the job
    // crashed before the query below ever ran. Kept here for reference only.

    // Aggregate ids per (p, c) into a serialized bitmap, then pull the rows
    // back as (p, c, bytes) tuples and print them.
    spark.sql(
      """
        |select p, c, ids
        |from (
        |  select p, c, bitmap(collect_set(id)) as ids
        |  from data
        |  group by p, c
        |)
        |""".stripMargin).rdd.map { row =>
      val p = row.getAs[String]("p")
      val c = row.getAs[String]("c")
      val ids = row.getAs[Array[Byte]]("ids")
      (p, c, ids)
    }.foreach(println)

    spark.close()
  }

}
