package cn.doitedu.dwetl

import cn.doitedu.dwetl.utils.RrUtils
import org.apache.spark.sql.{Encoder, SparkSession, TypedColumn}
import org.apache.spark.sql.expressions.Aggregator
import org.roaringbitmap.RoaringBitmap

/**
 * @author 涛哥
 * @nick_name "deep as the sea"
 * @contact qq:657270652 wx:doit_edu
 * @site www.doitedu.cn
 * @date 2021-01-18
 * @desc 分享事件多维分析cube报表
 *
 *  目标表建表语句：
CREATE TABLE dws.shareevent_overview_bitmap_cube(
   cat_name      string,
   brand_name    string,
   page_id       string,
   lanmu_name    string,
   share_method  string,
   hour_range    int,
   device_type   string,
   user_cnt      bigint,
   share_cnt     bigint
)
PARTITIONED BY (dt  string)
STORED AS parquet
 *
 *
 */

object ShareEventBitmapCubeCalc {

  /**
   * Driver entry point.
   *
   * Reads the pre-aggregated bitmap table `dws.shareevent_overview_bitmap`
   * for one partition and writes a full multi-dimensional cube
   * (GROUP BY ... WITH CUBE over 7 dimensions) into
   * `dws.shareevent_overview_bitmap_cube`, computing the exact distinct user
   * count per cube cell by OR-ing the per-row RoaringBitmaps.
   *
   * @param args args(0) = target partition date `dt` (e.g. "2021-01-18");
   *             used both as the destination partition and the source filter.
   */
  def main(args: Array[String]): Unit = {
    // Fail fast with a usage message instead of an opaque
    // ArrayIndexOutOfBoundsException when the date argument is missing.
    require(args.length >= 1, "Usage: ShareEventBitmapCubeCalc <dt>  e.g. 2021-01-18")
    // Bind the partition date once; it is interpolated into the SQL twice.
    val dt = args(0)

    val spark = SparkSession.builder()
      .appName("多维分析计算（利用bitmap做逐层聚合）")
      .enableHiveSupport()
      //.config("spark.sql.shuffle.partitions",10)
      //.master("local[*]")
      .getOrCreate()
    import org.apache.spark.sql.functions._
    import spark.implicits._

    import cn.doitedu.dwetl.utils.RrUtils._
    import cn.doitedu.dwetl.utils.RrbitmapOrAggregationFunction

    // rr_or  : UDAF that ORs the serialized RoaringBitmaps of a group into one bitmap.
    // rr_card: UDF returning the cardinality (distinct-user count) of a serialized bitmap.
    spark.udf.register("rr_or", RrbitmapOrAggregationFunction)
    spark.udf.register("rr_card", getCard _)

    /* Alternative: Aggregator-based UDAF (kept for reference)
    val rr_or = new Aggregator[Array[Byte],Array[Byte],Array[Byte]] {
      override def zero: Array[Byte] = {
        val rr = new RoaringBitmap()
        RrUtils.ser(rr)
      }

      override def reduce(b: Array[Byte], a: Array[Byte]): Array[Byte] = {
        val bitmapa: RoaringBitmap = RrUtils.de(a)
        val bitmapb: RoaringBitmap = RrUtils.de(b)
        bitmapb.or(bitmapa)
        RrUtils.ser(bitmapb)
      }

      override def merge(b1: Array[Byte], b2: Array[Byte]): Array[Byte] = {
        reduce(b1,b2)
      }

      override def finish(reduction: Array[Byte]): Array[Byte] = reduction
      override def bufferEncoder: Encoder[Array[Byte]] = newByteArrayEncoder
      override def outputEncoder: Encoder[Array[Byte]] = newByteArrayEncoder
    }
    spark.udf.register("rr_or",udaf(rr_or))*/

    // Ensure the session is released even if the SQL job fails.
    try {
      spark.sql(
        s"""
          |
          |INSERT INTO TABLE dws.shareevent_overview_bitmap_cube PARTITION(dt='${dt}')
          |select
          |   cat_name     ,
          |   brand_name   ,
          |   page_id      ,
          |   lanmu_name   ,
          |   share_method ,
          |   hour_range   ,
          |   device_type  ,
          |   rr_card(rr_or(guid_bitmap)) as user_cnt,
          |   sum(share_cnt) as share_cnt
          |
          |from dws.shareevent_overview_bitmap
          |where dt='${dt}'
          |group by
          |   cat_name     ,
          |   brand_name   ,
          |   page_id      ,
          |   lanmu_name   ,
          |   share_method ,
          |   hour_range   ,
          |   device_type
          |with cube
          |
          |""".stripMargin).show(20, false)
    } finally {
      spark.close()
    }
  }
}
