package com.edu360.select

import com.edu360.utils.{AreaDistributionUtil, ToMysqlUtils}
import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}

object SelectAreaDistributionSQL {

  /**
   * Entry point: reads DSP ad-log records from a Parquet file, aggregates
   * request / bid / impression / click counts and DSP spend per
   * (provincename, cityname) via Spark SQL, and writes the result to the
   * MySQL table "areaDistribution1".
   *
   * Expected single argument: logInputPath — path of the Parquet input.
   * Exits early with a usage message if the argument count is wrong.
   */
  def main(args: Array[String]): Unit = {
    // 0. Validate argument count before doing any Spark work.
    if (args.length != 1) {
      println(
        """
          |com.edu360.select.SelectAreaDistributionSQL
          |参数：
          | logInputPath
        """.stripMargin)
      sys.exit()
    }
    // 1. Destructure the single program argument.
    val Array(logInputPath) = args

    // 2. Build SparkConf -> SparkContext.
    //    BUG FIX: spark.serializer must be set BEFORE the SparkContext is
    //    created — settings applied to a SparkConf after construction are
    //    ignored by the already-running context, so Kryo was never enabled.
    val sparkConf = new SparkConf()
      .setAppName(this.getClass.getSimpleName)
      .setMaster("local[*]") // NOTE(review): hard-coded master; consider supplying via spark-submit
      // Kryo serialization for worker-to-worker shuffle data transfer.
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    val sc = new SparkContext(sparkConf)
    val sqlContext = new SQLContext(sc)

    // 3. Read the Parquet file and register it as a temp table for SQL.
    val parquet: DataFrame = sqlContext.read.parquet(logInputPath)
    parquet.registerTempTable("log")

    // 4. Aggregate per (province, city): original/valid/ad request counts,
    //    bid participation & wins, impressions, clicks, and DSP spend/cost.
    //    winprice/adpayment are stored in thousandths, hence the /1000.
    val result = sqlContext.sql("select provincename,cityname,sum(case when requestmode=1 and processnode>=1 then 1 else 0 end) 原始请求数,sum(case when requestmode=1 and processnode>=2 then 1 else 0 end) 有效请求数,sum(case when requestmode=1 and processnode=3 then 1 else 0 end) 广告请求数,sum(case when iseffective=1 and isbilling=1 and isbid=1 and adorderid != 0 then 1 else 0 end) 参与竞价数,sum(case when iseffective=1 and isbilling=1 and iswin=1 then 1 else 0 end) 竞价成功数,sum(case when requestmode=1 and iseffective=2 then 1 else 0 end) 展示数,sum(case when requestmode=1 and iseffective=3 then 1 else 0 end) 点击数,sum(case when iseffective=1 and isbilling=1 and iswin=1 then winprice/1000 else 0 end) DSP广告消费,sum(case when iseffective=1 and isbilling=1 and iswin=1 then adpayment/1000 else 0 end) DSP广告成本 from log group by provincename,cityname")

    // 5. Persist the aggregated DataFrame to MySQL, then release resources.
    ToMysqlUtils.dfToSql(result, "areaDistribution1")
    sc.stop()
  }
}
