package doit20.datayi.utils

import java.io.{ByteArrayOutputStream, DataOutputStream}

import doit20.datayi.etl.BitMapAggrCard
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.expressions.UserDefinedFunction
import org.roaringbitmap.RoaringBitmap

object BitmapFunctionTest {

  /**
   * Smoke test for bitmap-based exact distinct-count (UV) aggregation:
   *   1. group by (province, city) and pack each group's guid set into a
   *      serialized RoaringBitmap column via a registered UDF, then
   *   2. OR-merge those bitmaps per grouping key with the BitMapAggrCard UDAF
   *      and take the cardinality — an exact distinct-guid count that can be
   *      rolled up from city level to province level without double counting.
   */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder()
      .master("local")
      // was "": an empty appName makes the job anonymous in the Spark UI / history server
      .appName("BitmapFunctionTest")
      .config("spark.sql.shuffle.partitions", "1")
      .enableHiveSupport()
      .getOrCreate()

    import org.apache.spark.sql.functions._
    import spark.implicits._

    // Serializes a guid set into RoaringBitmap wire format (Array[Byte]) so the
    // bitmap can travel through a DataFrame column and be merged downstream.
    val bitmap: Array[Int] => Array[Byte] = (guids: Array[Int]) => {
      val bm = RoaringBitmap.bitmapOf(guids: _*)
      bm.runOptimize() // compress runs before serializing; smaller shuffle payload
      val bytes = new ByteArrayOutputStream()
      val out = new DataOutputStream(bytes)
      bm.serialize(out)
      out.flush() // DataOutput does not guarantee write-through; flush explicitly
      bytes.toByteArray
    }
    spark.udf.register("bitmap", bitmap)

    val testData = spark.createDataset(Seq(
      ("a", "c", 1),
      ("a", "c", 2),
      ("a", "c", 3),
      ("a", "d", 1),
      ("a", "d", 2),
      ("a", "d", 4)
    )).toDF("province", "city", "guid")

    // createOrReplaceTempView: plain createTempView throws if the view already
    // exists (e.g. when re-running inside the same session / REPL).
    testData.createOrReplaceTempView("t")

    // Base aggregation: one serialized uv bitmap per (province, city).
    // a, c -> {1, 2, 3} ; a, d -> {1, 2, 4}
    val baseAggr = spark.sql(
      """
        |select
        |  province, city, bitmap(collect_set(guid)) as uv_bitmap
        |from t
        |group by province, city
        |""".stripMargin) // fixed column-name typo: uv_bimap -> uv_bitmap (consistent below)
    baseAggr.createOrReplaceTempView("base")

    // Register the merge-and-count UDAF (ORs all input bitmaps, returns cardinality).
    val function: UserDefinedFunction = udaf(BitMapAggrCard)
    spark.udf.register("bitmap_aggr_card", function)

    // Province-level exact UV: expect a -> 4 (distinct guids 1, 2, 3, 4)
    val res = spark.sql(
      """
        |select
        |  province,
        |  bitmap_aggr_card(uv_bitmap) as uv_cnt
        |from base
        |group by province
        |""".stripMargin)
    res.show(100, truncate = false)

    // City-level exact UV: expect (a, c) -> 3 and (a, d) -> 3
    val res2 = spark.sql(
      """
        |select
        |  province, city,
        |  bitmap_aggr_card(uv_bitmap) as uv_cnt
        |from base
        |group by province, city
        |""".stripMargin)
    res2.show(100, truncate = false)

    spark.close()
  }
}
