package cn.doitedu.dwh

import org.apache.spark.sql.SparkSession

/**
 * @author 涛哥
 * @nick_name "deep as the sea"
 * @contact qq:657270652 wx:doit_edu
 * @site www.doitedu.cn
 * @date 2021-06-12
 * @desc 基于base cuboid进行再聚合
 */
/**
 * Re-aggregates the base cuboid (`test.bitmap_cnt_dstc_base_cuboid`) into
 * higher-level cuboids via `GROUP BY ... WITH ROLLUP`, merging per-group
 * user bitmaps with a custom bitmap-OR UDAF to produce exact distinct
 * user counts (uv) alongside summed page views (pv).
 */
object ApplogTrfcCubeReAggregate {

  def main(args: Array[String]): Unit = {

    val spark = SparkSession
      .builder()
      // Single shuffle partition: result set is tiny (one row per rollup group),
      // so avoid the default 200 partitions for local runs.
      .config("spark.sql.shuffle.partitions", "1")
      // FIX: was appName("") — an empty name makes the job unidentifiable
      // in the Spark UI / history server.
      .appName("ApplogTrfcCubeReAggregate")
      .enableHiveSupport()
      .master("local")
      .getOrCreate()

    // Register the custom bitmap-merge UDAF: ORs together the serialized
    // bitmaps of a group into a single bitmap (exact distinct-user set).
    import org.apache.spark.sql.functions._
    spark.udf.register("bmp_or", udaf(BitmapAggregator))

    // UDF: deserialize a bitmap byte array and return its cardinality,
    // i.e. the exact distinct-user count it represents.
    val getBitMapCardinality = (arrBitmap: Array[Byte]) => {
      BmpUtils.byteArray2Bmp(arrBitmap).getCardinality
    }
    spark.udf.register("bmp_size", getBitMapCardinality)

    // Re-aggregate the base cuboid. WITH ROLLUP emits every prefix grouping
    // (province/city/region, province/city, province, grand total), so one
    // query yields all higher-level cuboids with exact uv via bitmap OR.
    val res = spark.sql(
      """
        |
        |select
        |   province,
        |   city,
        |   region,
        |   bmp_or(guid_bmp) as guid_bmp,
        |   bmp_size(bmp_or(guid_bmp)) as uv_cnt,
        |   sum(pv_cnt) as pv_cnt
        |from test.bitmap_cnt_dstc_base_cuboid
        |group by province,city,region
        |with rollup
        |
        |""".stripMargin)

    // truncate = false so the (binary) bitmap column is not abbreviated.
    res.show(100, false)

    spark.close()

  }

}
