package cn.doitedu.dwetl

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, DataOutputStream}

import cn.doitedu.dwetl.utils.{RrbitmapOrAggregationFunction, RrUtils}
import org.apache.spark.sql.SparkSession
import org.roaringbitmap.RoaringBitmap

import scala.collection.mutable

/**
 * @author 涛哥
 * @nick_name "deep as the sea"
 * @contact qq:657270652 wx:doit_edu
 * @site www.doitedu.cn
 * @date 2021-01-17
 * @desc Level-by-level cube aggregation of a distinct-count metric, using the bitmap (RoaringBitmap) approach

create table test.bitmap_demo(
province  string,
city      string,
region    string,
bitmap    binary
)
stored as parquet
;


 *
 *
 */
/**
 * Driver that reads pre-built RoaringBitmap columns from the Hive table
 * `test.bitmap_demo` and computes distinct-user counts (UV) at several
 * aggregation levels:
 *   1. the finest grain (province/city/region) — just the cardinality of each row's bitmap;
 *   2. all CUBE combinations of (province, city) — bitmaps are OR-merged per group
 *      before taking the cardinality, so users appearing in several rows are
 *      counted once.
 */
object CubeDistinctAggregationRead {
  def main(args: Array[String]): Unit = {

    // Local SparkSession with Hive support so the test.bitmap_demo table is resolvable.
    val spark = SparkSession.builder()
      .appName("")
      .enableHiveSupport()
      .master("local")
      .getOrCreate()

    // UDF: deserialize a bitmap (binary column) and return its cardinality,
    // i.e. the exact distinct count it represents.
    spark.udf.register("rrbm_card", RrUtils.getCard _)

    // Distinct user count per (province, city, region) — one bitmap per row,
    // so no merging is needed; just read each bitmap's cardinality.
    spark.sql(
      """
        |
        |select
        |province,
        |city,
        |region,
        |rrbm_card(bitmap) as uv
        |
        |from test.bitmap_demo
        |
        |""".stripMargin).show()

    // UDAF: OR-merge all bitmaps within a group into a single bitmap.
    // Combined with rrbm_card this yields an exact de-duplicated count
    // for every grouping set produced by WITH CUBE.
    spark.udf.register("rrbm_agr_or", RrbitmapOrAggregationFunction)
    spark.sql(
      """
        |
        |select
        |province,
        |city,
        |-- rrbm_agr_or(bitmap) as bitmap,
        |rrbm_card(rrbm_agr_or(bitmap)) as uv
        |from test.bitmap_demo
        |group by
        | province,
        | city
        |with cube
        |
        |""".stripMargin).show(100, false)

    spark.close()
  }

}
