package cn.doitedu.datayi.etl

import java.io.{ByteArrayInputStream, DataInputStream}

import cn.doitedu.datayi.utils.{BitMapAggr, BitmapUDAF}
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SparkSession
import org.roaringbitmap.RoaringBitmap

/**
 * @author 涛哥
 * @nick_name "deep as the sea"
 * @contact qq:657270652 wx:doit_edu
 * @site www.doitedu.cn
 * @date 2021-08-09
 * @desc Demo: pre-aggregate distinct users (guid) into RoaringBitmaps at fine
 *       dimension combinations, then roll up to coarser dimensions by OR-ing
 *       the stored bitmaps (exact UV without re-scanning the raw log).
 *       Sample rows of data/rollup/rollup.csv (columns include guid,
 *       devicetype, province, isnew, acc_tl):
 *       1,IP1,s1,a,N,200,MI6,台湾省,1
 *       1,IP1,s1,b,N,200,MI6,台湾省,1
 *       3,IP1,s1,c,N,400,MI6,台湾省,1
 *       1,IP2,s2,a,Y,200,MI6,台湾省,0
 *       2,IP3,s3,b,N,200,MI5,扶桑省,0
 *       3,IP3,s3,c,N,500,MI6,扶桑省,0
 *       3,IP3,s4,c,Y,500,MI6,扶桑省,0
 */
object RollupBitmapDemo {

  /**
   * Entry point demonstrating bitmap-based UV roll-up:
   *
   *  1. Aggregate each group's `guid`s into a serialized RoaringBitmap per
   *     (devicetype, province, isnew) combination, alongside a `sum(acc_tl)`.
   *  2. Roll the fine-grained bitmaps up to per-devicetype results by OR-ing
   *     them, so the distinct count (UV) stays exact without touching the raw
   *     log again.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {

    // Keep Spark's own logging quiet so the demo tables are readable.
    Logger.getLogger("org.apache").setLevel(Level.WARN)

    val spark = SparkSession.builder()
      .appName("rollup test")
      .master("local")
      // Hadoop settings passed through SparkSession.config must be prefixed
      // with "spark.hadoop." to be copied into the Hadoop Configuration;
      // a bare "fs.defaultFS" key would be silently ignored.
      .config("spark.hadoop.fs.defaultFS", "file:///")
      .getOrCreate()

    // Source log; header row supplies column names, schema is inferred.
    val logDf = spark.read
      .option("header", "true")
      .option("inferSchema", "true")
      .csv("data/rollup/rollup.csv")
    logDf.createTempView("log")

    // Register the custom aggregate that folds a group's guid values into a
    // serialized RoaringBitmap (Array[Byte]).
    import org.apache.spark.sql.functions._
    spark.udf.register("agr2bitmap", udaf(BitmapUDAF))

    // Scalar UDF: deserialize a bitmap's bytes and return its cardinality,
    // i.e. the exact distinct-user count for the group.
    val getCardinality = (bytes: Array[Byte]) => {
      val bitmap = RoaringBitmap.bitmapOf()
      bitmap.deserialize(new DataInputStream(new ByteArrayInputStream(bytes)))
      bitmap.getCardinality
    }
    spark.udf.register("getcnt", getCardinality)

    // Fine-grained aggregation: one bitmap (plus its UV and the acc_tl sum)
    // per (devicetype, province, isnew) combination.
    val fineGrainedAgg = spark.sql(
      """
        |
        |select
        | devicetype,
        | province,
        | isnew,
        | getcnt(agr2bitmap(guid)) as uv,
        | agr2bitmap(guid) as uv_bitmap,
        | sum(acc_tl) as acc_tl
        |
        |from log
        |group by devicetype,province,isnew
        |
        |""".stripMargin)

    fineGrainedAgg.createTempView("dim_more")
    fineGrainedAgg.show(100, false)

    // Roll-up: OR the already-computed bitmaps per devicetype — the raw log
    // is not scanned again, yet the UV count stays exact.
    spark.udf.register("bm_aggr", udaf(BitMapAggr))
    val rolledUpAgg = spark.sql(
      """
        |
        |select
        |  devicetype,
        |  bm_aggr(uv_bitmap) uv_bitmap,
        |  getcnt(bm_aggr(uv_bitmap)) as uv_cnt
        |
        |from dim_more
        |group by devicetype
        |
        |""".stripMargin)

    rolledUpAgg.show(100, false)

    spark.close()

  }

}
