package cn.dmp.report

import java.util.Properties

import com.typesafe.config.{Config, ConfigFactory}
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}


/**
  * 广告投放的地域分布统计
  * 实现方式：（读取parquet文件）
  *     (1)sparkSQL实现（就用sparkSession）。
  *     (2)spark算子实现（即spark core，就用sparkContext））
  */
object AreaAnalyseRpt_sparkSQL {
  /**
    * Regional (province/city) distribution report for ad delivery, implemented
    * with Spark SQL over a parquet input.
    *
    * args(0) = logInputPath      : input parquet directory
    * args(1) = resultOutputPath  : output parquet directory
    *
    * Results are written both to parquet and to a MySQL table configured via
    * Typesafe Config keys: jdbc.url, jdbc.user, jdbc.password, jdbc.arearpt.tableName.
    */
  def main(args: Array[String]): Unit = {
    // 0. Validate argument count; exit early, otherwise the destructuring
    //    assignment below would throw a MatchError anyway.
    if (args.length != 2) {
      println(
        """
          |cn.dmp.report.AreaAnalyseRpt_sparkSQL
          |Parameters:
          |   logInputPath
          |   resultOutputPath
        """.stripMargin)
      sys.exit(1)
    }

    // 1. Receive program arguments
    val Array(logInputPath, resultOutputPath) = args

    // 2. Create the SparkSession
    //    Kryo serializer: used when RDDs are serialized to disk and for
    //    worker-to-worker data transfer.
    val sparkSession: SparkSession = SparkSession.builder().appName("AreaAnalyseRpt").master("local[*]")
      .enableHiveSupport()
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .getOrCreate()

    // 3. Read the parquet input
    val parquetDF: DataFrame = sparkSession.read.parquet(logInputPath)

    // 4. Business logic: register a temp view and aggregate per province/city.
    //    NOTE: the Chinese column aliases below are part of the output schema
    //    (table column names) and must not be changed.
    parquetDF.createOrReplaceTempView("log")
    val res: DataFrame = sparkSession.sql(
      """
        |select
        |Provincename, Cityname,
        |sum(case when Requestmode=1 and Processnode>=2 then 1 else 0 end) `有效请求数`,
        |sum(case when Requestmode=1 and Processnode=3 then 1 else 0 end) `广告请求数`,
        |sum(case when Iseffective=1 and Isbilling=1 and Isbid=1 and Adorderid!=0 then 1 else 0 end) `参与竞价数`,
        |sum(case when Iseffective=1 and Isbilling=1 and Iswin=1 then 1 else 0 end) `竞价成功数`,
        |sum(case when Requestmode=2 and Iseffective=1 then 1 else 0 end) `展示数`,
        |sum(case when Requestmode=3 and Iseffective=1 then 1 else 0 end) `点击数`,
        |sum(case when Iseffective=1 and Isbilling=1 and Iswin=1 then 1.0*Adpayment/1000 else 0 end) `广告成本`,
        |sum(case when Iseffective=1 and Isbilling=1 and Iswin=1 then 1.0*Winprice/1000 else 0 end) `广告消费`
        |from log
        |group by Provincename, Cityname
      """.stripMargin)

    // Preview once; each show() triggers a full recomputation, so avoid repeats.
    res.show()

    // 5a. Write the report to the local/parquet output path
    res.write.mode(SaveMode.Overwrite).parquet(resultOutputPath)

    // 5b. Write the report to MySQL. The same result could be written to any
    //     sink: mysql / files / hbase / redis / etc.
    val load: Config = ConfigFactory.load()
    val props = new Properties()
    props.setProperty("user", load.getString("jdbc.user"))
    props.setProperty("password", load.getString("jdbc.password"))
    res.write.mode(SaveMode.Overwrite).jdbc(load.getString("jdbc.url"), load.getString("jdbc.arearpt.tableName"), props)

    sparkSession.stop()

    // Local test run: remember to set the program arguments (input and output
    // directories) in advance, e.g.:
    //   input : .../DMP/biz2parquet
    //   output: .../DMP/AreaAnalyseRpt_sparkSQL
  }
}
