package cn.doitedu.dmp

import java.util.Properties

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

object DmpReports {

  /**
   * Daily DMP advertising funnel report.
   *
   * Reads one day of parquet log output, aggregates the request funnel
   * (original → effective → ad → bid → win → show → click) across several
   * dimension combinations via GROUPING SETS, and prints the result.
   *
   * Usage: the processing date may be passed as the first CLI argument in
   * `yyyy-MM-dd` form; when absent it defaults to "2020-09-01" (the
   * previously hard-coded value, so existing invocations are unaffected).
   */
  def main(args: Array[String]): Unit = {

    Logger.getLogger("org").setLevel(Level.WARN)

    // Processing date: first CLI arg if supplied, otherwise the original default.
    val dt = args.headOption.getOrElse("2020-09-01")
    // Guard: dt is interpolated into both a filesystem path and a SQL literal,
    // so reject anything that is not a plain yyyy-MM-dd date.
    require(dt.matches("""\d{4}-\d{2}-\d{2}"""), s"expected date as yyyy-MM-dd, got: $dt")

    val spark = SparkSession.builder()
      .appName("画像标签计算")
      .master("local")
      .getOrCreate()

    // One day of DMP log output, partition selected by date.
    val df: DataFrame = spark.read.parquet(s"user_portrait/data/dmp_log_output/$dt")

    df.createTempView("df")

    /**
     * Funnel stage semantics (from the upstream log schema):
     *  - original request:  processnode = 0
     *  - effective request: processnode = 2 and iseffective = 1
     *  - ad request:        processnode = 3
     *  - bid participated:  processnode = 3 and isbid = 1
     *  - bid won:           processnode = 3 and isbid = 1 and iswin = 1
     *  - ad shown:          requestmode = 1
     *  - ad clicked:        requestmode = 2
     *
     * count(if(cond, 1, null)) counts only rows matching cond, because
     * count() ignores nulls.
     */
    val res = spark.sql(
      s"""
        |
        |select
        |'$dt' as dt,
        |provincename,cityname,district,networkmannername,ispname,adspacetypename,device,client,adplatformproviderid,
        |count(if(processnode=0,1,null)) as org_requests,
        |count(if(processnode=2 and iseffective=1,1,null)) as ef_requests,
        |count(if(processnode=3,1,null)) as ad_requests,
        |count(if(processnode=3 and isbid=1,1,null)) as bid_requests,
        |count(if(processnode=3 and isbid=1 and iswin=1,1,null)) as win_requests,
        |count(if(requestmode=1,1,null)) as ad_show_cnts,
        |count(if(requestmode=2,1,null)) as ad_click_cnts
        |
        |from df
        |
        |group by provincename,cityname,district,networkmannername,ispname,adspacetypename,device,client,adplatformproviderid
        |grouping sets((provincename),(provincename,cityname),(provincename,cityname,district),(device),(device,client),(ispname),(networkmannername),(ispname,networkmannername),(adspacetypename),(adplatformproviderid),(adplatformproviderid,adspacetypename))
        |
        |""".stripMargin)

    // Optional JDBC sink, kept for reference; enable by uncommenting and
    // providing real credentials (preferably from config, not source).
    // val props = new Properties()
    // props.setProperty("user","root")
    // props.setProperty("password","123456")
    // res.write.mode(SaveMode.Append).jdbc("jdbc:mysql://doitedu01:3306/dmp","ad_report",props)

    res.show(100, false)

    spark.close()

  }

}
