package cn.pengpeng.dmp.report

import cn.pengpeng.dmp.utils.ConfigHandler
import org.antlr.v4.runtime.atn.ATNConfigSet.ConfigHashSet
import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
  * 统计数据到区域
  */
object RptAreaAnalysisSQL {

  /**
    * Entry point: reads parquet ad-log data, aggregates request/show/click/cost
    * metrics per (provincename, cityname), and writes the result to MySQL
    * via the JDBC settings in [[ConfigHandler]].
    *
    * @param args optional; args(0) overrides the input parquet path
    *             (defaults to the original local path when absent).
    */
  def main(args: Array[String]): Unit = {
    // Allow the input path to be supplied on the command line; the previous
    // hard-coded local path remains the default, so existing usage is unchanged.
    val inputPath = args.headOption.getOrElse("d:\\data\\spark\\out1")

    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("RptAreaAnalysisSQL")
    val spark: SparkSession = SparkSession
      .builder()
      .config(conf)
      .getOrCreate()

    // Ensure the session is stopped even if the job throws.
    try {
      // Read the parquet log files.
      val parquet: DataFrame = spark.read.parquet(inputPath)

      // createOrReplaceTempView avoids an AnalysisException when a view named
      // "log" already exists in this session (createTempView would throw).
      parquet.createOrReplaceTempView("log")

      val result: DataFrame = spark.sql(
        """
          |select provincename , cityname ,
          |sum(case when requestmode=1 and processnode>=1 then 1 else 0 end) rawReq,
          |sum(case when requestmode=1 and processnode>=2 then 1 else 0 end) effReq,
          |sum(case when requestmode=1 and processnode=3 then 1 else 0 end) adReq,
          |sum(case when iseffective=1 and isbilling=1 and isbid=1 and adorderid!=0 then 1 else 0 end) rtbReq,
          |sum(case when iseffective=1 and isbilling=1 and iswin = 1 then 1 else 0 end) winReq,
          |sum(case when requestmode=2 and iseffective=1 then 1 else 0 end) adShow,
          |sum(case when requestmode=3 and iseffective=1 then 1 else 0 end) adClick,
          |sum(case when iseffective=1 and isbilling=1 and iswin = 1 then winprice/1000 else 0 end) adCost,
          |sum(case when iseffective=1 and isbilling=1 and iswin = 1 then adpayment/1000 else 0 end) adPayment
          |from log group by provincename,cityname
        """.stripMargin)

      // Persist the per-region aggregation to MySQL.
      result.write.jdbc(ConfigHandler.url, ConfigHandler.areaAnalysis_table, ConfigHandler.dbProps)
    } finally {
      // Release Spark resources.
      spark.stop()
    }
  }

}
