package doit20.datayi.etl

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.expressions.UserDefinedFunction

/**
 * @author 涛哥
 * @nick_name "deep as the sea"
 * @contact qq:657270652 wx:doit_edu
 * @site www.doitedu.cn
 * @date 2021-04-18
 * @desc Traffic analysis report: per province and per device model — total pv, total uv, and total access duration
 */
object ApplogTrafficReport1 {

  /**
   * Entry point. Aggregates the pre-aggregated traffic base table
   * (doit20dws.app_tfc_dimension_aggr_base) by province and device type,
   * producing total pv, total access time, and exact uv (bitmap cardinality).
   *
   * @param args optional; args(0) overrides the partition date (yyyy-MM-dd),
   *             defaulting to "2021-04-14" to stay backward compatible.
   */
  def main(args: Array[String]): Unit = {

    // NOTE(review): local master is hard-coded for development; in production
    // the master should come from spark-submit, not the code.
    val spark = SparkSession.builder()
      .master("local")
      .appName("ApplogTrafficReport1") // fix: appName was an empty string, which shows blank in the Spark UI
      .enableHiveSupport()
      .getOrCreate()

    // Needed for udaf(...); the previously present spark.implicits._ was unused.
    import org.apache.spark.sql.functions._

    // Partition date is parameterized instead of hard-coded.
    val dt = if (args.nonEmpty) args(0) else "2021-04-14"

    // Load one day's partition of the dimension-aggregated traffic base table.
    val table = spark.read.table("doit20dws.app_tfc_dimension_aggr_base").where(s"dt='$dt'")
    table.createTempView("t")

    // Register the bitmap-cardinality UDAF so SQL can compute exact distinct
    // user counts from the pre-built uv bitmaps (no costly count(distinct ...)).
    val function: UserDefinedFunction = udaf(BitMapAggrCard)
    spark.udf.register("bitmap_aggr_card", function)

    spark.sql(
      """
        |
        |select
        |   province,
        |   devicetype,
        |    -- pv总数，访问总时长，uv总数
        |   sum(pv_cnt) as pv_cnt,
        |   sum(acctime_amt) as acctime_amt,
        |   bitmap_aggr_card(uv_bitmap) as uv_cnt
        |from t
        |group by province,devicetype
        |
        |""".stripMargin).show(100, false)

    spark.close()

  }

}
