
import org.apache.spark.sql.{DataFrame, SparkSession}

import java.text.SimpleDateFormat
import java.util.Date





object SparkAdsLogApp {
  /**
   * Entry point: reads an ads-event CSV, cleans null values, registers it as a
   * temp view and runs four analytic queries over it.
   *
   * CSV columns (in order):
   *   platform_name, event_type, ad_id, event_time, client_ip, device_id, client_os
   *
   * @param args optional; args(0) overrides the default input CSV path
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder
      .appName("SparkAdsLogApp")
      .master("local[2]")
      // Small local data set: 2 shuffle partitions instead of the 200 default.
      .config("spark.sql.shuffle.partitions", "2")
      .getOrCreate()

    // Allow the input path on the command line; keep the original hard-coded
    // development path as a backward-compatible default.
    val inputPath =
      if (args.nonEmpty) args(0)
      else "D:\\IdeaProjects\\zg6\\sparksql\\src\\main\\java\\ads-data.csv"

    val df = spark.read
      .schema("platform_name STRING,event_type STRING,ad_id STRING,event_time STRING,client_ip STRING,device_id STRING,client_os STRING")
      .csv(inputPath)

    // 1. Replace null fields with "unknown".
    //    BUG FIX: the cleaned DataFrame must back the temp view — previously
    //    na.fill was computed but the view (and all queries) used the raw df.
    val df1 = df.na.fill("unknown")
    df1.createOrReplaceTempView("tmp_view_ads_log")

    spark.sql(
      """
        |SELECT * FROM tmp_view_ads_log
        |""".stripMargin).show()

    // 2. UDF: epoch-millisecond Long -> "yyyy-MM-dd HH:mm:ss.SSS" String.
    //    A SimpleDateFormat is created per invocation because it is not thread-safe.
    spark.udf.register("convert_timestamp",
      (ts: Long) => {
        val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS")
        sdf.format(new Date(ts))
      })

    val df2: DataFrame = spark.sql(
      """
        |SELECT
        |  event_time,
        |  convert_timestamp(cast(event_time AS BIGINT)) AS date_str,
        |  from_unixtime(cast(event_time AS BIGINT)/1000, 'yyyy-MM-dd HH:mm:ss') AS ds_str
        |FROM tmp_view_ads_log
        |""".stripMargin
    )
    // df2.show(10, truncate = false)

    // 3. Daily ad-delivery volume per platform and client OS.
    //    BUG FIX: requirement asks for client_os, not client_ip.
    val sql3 = spark.sql(
      """
        |SELECT SUBSTRING(convert_timestamp(cast(event_time AS BIGINT)), 1, 10) AS ts,
        |       platform_name,
        |       client_os,
        |       count(1) AS ad_count
        |FROM tmp_view_ads_log
        |GROUP BY SUBSTRING(convert_timestamp(cast(event_time AS BIGINT)), 1, 10), platform_name, client_os
        |""".stripMargin)
    // sql3.show()

    // 4. Per platform / device / OS / ad: impressions, clicks, click-through rate.
    //    BUG FIX: grouped by ad_id (requirement: "per ad"), not client_ip.
    //    NOTE: click_rate is NULL when impression_count is 0 (SQL divide-by-zero yields null).
    val sql4 = spark.sql(
      """
        |SELECT platform_name, device_id, client_os, ad_id,
        |       sum(if(event_type='click' OR event_type='impression',1,0)) AS impression_count,
        |       sum(if(event_type='click',1,0)) AS click_count,
        |       round(sum(if(event_type='click',1,0)) / sum(if(event_type='click' OR event_type='impression',1,0)), 2) AS click_rate
        |FROM tmp_view_ads_log
        |GROUP BY platform_name, device_id, client_os, ad_id
        |""".stripMargin)
    // sql4.show()

    // 5. Top-5 ads per platform by impression volume (row_number over platform partition).
    //    BUG FIX: removed the trailing ';' that Spark's SQL parser rejects.
    val sql5 = spark.sql(
      """
        |WITH tmp1 AS (
        |  SELECT platform_name, ad_id,
        |         sum(if(event_type='click' OR event_type='impression',1,0)) AS impression_count
        |  FROM tmp_view_ads_log
        |  GROUP BY platform_name, ad_id
        |), tmp2 AS (
        |  SELECT platform_name, ad_id, impression_count,
        |         row_number() OVER (PARTITION BY platform_name ORDER BY impression_count DESC) AS rk
        |  FROM tmp1
        |)
        |SELECT platform_name, ad_id, impression_count, rk FROM tmp2 WHERE rk <= 5
        |""".stripMargin)
    sql5.show()

    spark.stop()
  }

}
