package com.itcast.report

import com.itcast.utils.{ConfigHandler, MysqlHandler}
import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

//地域分布情况（SQL）
//sql方式
/**
 * Aggregates ad-request/bid/impression/click metrics grouped by province and
 * city from the raw parquet log, then persists the report to MySQL.
 *
 * Reads:  parquet file at `ConfigHandler.parquetPath`
 * Writes: MySQL table `ConfigHandler.areatable` via `MysqlHandler.save2db`
 */
object RptAreaAnalysisSQL {
  def main(args: Array[String]): Unit = {
    // Spark configuration: local mode, Kryo serialization for faster shuffles.
    val sparkConf = new SparkConf()
      .setAppName("RptAreaAnalysisSQL")
      .setMaster("local[*]")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    val sparkContext = new SparkContext(sparkConf)
    // Guarantee the SparkContext is released even if the job or the DB save fails.
    try {
      val sQLContext = new SQLContext(sparkContext)
      // Load the raw log data and expose it to SQL as the "log" table.
      val rawDataFrame = sQLContext.read.parquet(ConfigHandler.parquetPath)
      rawDataFrame.registerTempTable("log")
      // UDF "area_func": conditional sum helper — contributes `value` to the
      // aggregate only when `condition` holds, otherwise 0 (equivalent to a
      // CASE WHEN ... THEN value ELSE 0 END expression).
      sQLContext.udf.register("area_func", (condition: Boolean, value: Double) => if (condition) value else 0)
      // Per-region funnel metrics. Relevant log columns:
      // requestmode, processnode, iseffective, isbilling, isbid, iswin,
      // adorderid, winprice, adpayment.
      val result = sQLContext.sql(
        """
        |select provincename,cityname,
        |sum(area_func(requestmode=1 and processnode>=1,1)) rawReq,
        |sum(case when requestmode=1 and processnode>=2 then 1 else 0 end) effReq,
        |sum(case when requestmode=1 and processnode=3 then 1 else 0 end) adReq,
        |sum(case when iseffective=1 and isbilling=1 and isbid=1 and adorderid!=0 then 1 else 0 end) rtbReq,
        |sum(case when iseffective=1 and isbilling=1 and iswin=1 then 1 else 0 end) winReq,
        |sum(case when requestmode=2 and iseffective=1 then 1 else 0 end) adShow,
        |sum(case when requestmode=3 and iseffective=1 then 1 else 0 end) adClick,
        |sum(case when iseffective=1 and isbilling=1 and iswin = 1 then winprice/1000 else 0 end) adCost,
        |sum(area_func(iseffective=1 and isbilling=1 and iswin=1,adpayment/1000)) adpayment
        |from log group by provincename,cityname
      """.stripMargin)
      // Persist the aggregated report to the configured MySQL table.
      MysqlHandler.save2db(result, ConfigHandler.areatable)
    } finally {
      // Previously stop() could be skipped on failure, leaking the context.
      sparkContext.stop()
    }
  }
}
