package cn.doitedu.dwetl

import java.util.Properties

import org.apache.spark.sql.SparkSession

/**
 * @author 涛哥
 * @nick_name "deep as the sea"
 * @contact qq:657270652 wx:doit_edu
 * @site www.doitedu.cn
 * @date 2021-01-18
 * @desc 分享事件多维分析基础bitmap表计算
 *
 *  目标表建表语句：
CREATE TABLE dws.shareevent_overview_bitmap(
   cat_name      string,
   brand_name    string,
   page_id       string,
   lanmu_name    string,
   share_method  string,
   hour_range    int,
   device_type   string,
   guid_bitmap   binary,
   share_cnt     bigint
)
PARTITIONED BY (dt  string)
STORED AS parquet
 *
 *
 */
object ShareEventBitmapCalc {

  /**
   * Computes the base bitmap table for multi-dimensional analysis of "share" events.
   *
   * Reads share-event detail rows from `dwd.event_app_detail` for one partition,
   * joins the product / page dimension tables (loaded over JDBC) and the global
   * guid→id mapping (`dwd.user_guid_global`), then aggregates by every analysis
   * dimension, writing a user-id bitmap plus a raw share count per group into
   * `dws.shareevent_overview_bitmap` (one partition per dt).
   *
   * @param args args(0) is the partition date (dt) to process, e.g. "2021-01-18"
   */
  def main(args: Array[String]): Unit = {

    // Fail fast with a clear usage message instead of an
    // ArrayIndexOutOfBoundsException when args(0) is interpolated below.
    require(args.nonEmpty, "Usage: ShareEventBitmapCalc <dt>   e.g. 2021-01-18")
    val dt = args(0)

    val spark = SparkSession.builder()
      .appName("多维分析计算（利用bitmap做逐层聚合）")
      .enableHiveSupport()
      //.config("spark.sql.shuffle.partitions",10)
      // NOTE(review): a hard-coded master overrides whatever spark-submit passes;
      // drop this line (or make it conditional) before deploying to a cluster.
      .master("local[*]")
      .getOrCreate()

    // Load the share-event detail data for the requested partition,
    // keeping only the columns the analysis needs (column pruning).
    val shareEvents = spark.read.table("dwd.event_app_detail")
      .where(s"dt='$dt' and eventid='share'")
      .select("guid", "devicetype", "timestamp", "properties")

    // JDBC connection settings for the dimension tables. Fail with a clear
    // message when the config file is missing (getResourceAsStream returns
    // null in that case), and close the stream — Properties.load does not.
    val props = new Properties()
    val confStream = ShareEventBitmapCalc.getClass.getClassLoader.getResourceAsStream("db.properties")
    require(confStream != null, "db.properties not found on the classpath")
    try props.load(confStream) finally confStream.close()

    // Product dimension table.
    val productInfo = spark.read.jdbc(props.getProperty("url"), "dim_product_info", props)

    // Page dimension table.
    val pageInfo = spark.read.jdbc(props.getProperty("url"), "dim_page_info", props)

    // createOrReplaceTempView: idempotent even if a view of the same
    // name already exists in this session.
    shareEvents.createOrReplaceTempView("events")
    productInfo.createOrReplaceTempView("productinfo")
    pageInfo.createOrReplaceTempView("pageinfo")

    // Star-schema style: the event fact table is joined against each dimension
    // table to produce one wide row per share event, carrying every analysis
    // dimension plus the (integer) global user id.
    // The cast is explicitly aliased `id` because the aggregation query below
    // refers to the column by that name.
    val joined = spark.sql(
      """
        |select
        |  cast(d.id as int)                                     as id,
        |  a.devicetype                                          as device_type,
        |  hour(from_unixtime(cast(a.timestamp/1000 as bigint))) as hour_range,
        |  nvl(b.cat_name,'未知')                                 as cat_name,
        |  nvl(b.brand_name,'未知')                               as brand_name,
        |  a.properties['pageId']                                as page_id,
        |  nvl(c.lanmu_name,'未知')                               as lanmu_name,
        |  a.properties['shareMethod']                           as share_method
        |
        |from events a left join productinfo b on a.properties['productId'] = b.id
        |              left join pageinfo c    on a.properties['pageId']    = c.id
        |              join dwd.user_guid_global d on a.guid = d.guid
        |""".stripMargin)

    joined.createOrReplaceTempView("joined")

    import cn.doitedu.dwetl.utils.RrUtils._
    import cn.doitedu.dwetl.utils.RrbitmapOrAggregationFunction

    // to_bitmap: collapses a set of int user ids into a serialized RoaringBitmap.
    // rr_or: bitmap OR aggregation, registered for downstream/ad-hoc rollup queries.
    spark.udf.register("to_bitmap", toBitmap)
    spark.udf.register("rr_or", RrbitmapOrAggregationFunction)

    // INSERT OVERWRITE (not INTO): re-running the job for the same dt replaces
    // the partition instead of appending duplicate rows, making the job idempotent.
    spark.sql(
      s"""
        |INSERT OVERWRITE TABLE dws.shareevent_overview_bitmap PARTITION(dt='$dt')
        |
        |SELECT
        |   cat_name,
        |   brand_name,
        |   page_id,
        |   lanmu_name,
        |   share_method,
        |   hour_range,
        |   device_type,
        |   to_bitmap(collect_set(id)) as guid_bitmap,
        |   count(1) as share_cnt
        |
        |FROM joined
        |GROUP BY
        |   cat_name,
        |   brand_name,
        |   page_id,
        |   lanmu_name,
        |   share_method,
        |   hour_range,
        |   device_type
        |""".stripMargin)

    spark.close()

  }

}
