package hou.report

import java.util.Properties

import hou.config.ConfigHelper
import org.apache.spark.sql.{SQLContext, SaveMode}
import org.apache.spark.{SparkConf, SparkContext}

// Analyze regional (province/city) ad-traffic distribution using Spark SQL.
object AreaAnalysisSql {

  /**
    * Entry point for the regional-distribution report job.
    *
    * Pipeline:
    *   1. Read the pre-converted ad-log parquet files (path from [[ConfigHelper]]).
    *   2. Register them as SQL table "log" and aggregate per (provincename, cityname):
    *      request-funnel counts (raw/effective/ad requests, RTB bids, wins, shows,
    *      clicks) and money columns (adpayment / winprice, scaled from thousandths).
    *   3. Overwrite the result into the JDBC table configured in [[ConfigHelper]].
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setAppName(s"${this.getClass.getName}")
    // Local master is hard-coded; fine for development runs.
    // NOTE(review): for cluster deployment this should come from spark-submit — confirm.
    conf.setMaster("local[*]")
    conf.set("spark.serializer", ConfigHelper.serializer)

    val sc = new SparkContext(conf)
    // Ensure the SparkContext is released even if the read/SQL/JDBC write fails;
    // previously a failure anywhere in the job would skip sc.stop().
    try {
      val sqlContext = new SQLContext(sc)

      val dataFrame = sqlContext.read.parquet(ConfigHelper.parquetPath)

      // Expose the DataFrame to Spark SQL under the table name "log".
      dataFrame.registerTempTable("log")

      // One output row per (province, city); each sum(if(...)) counts rows
      // matching a funnel stage, the last two accumulate money in units
      // (source columns are stored in thousandths, hence /1000).
      val result = sqlContext.sql(
        """
          |select provincename,cityname,
          |sum(if(requestmode=1 and processnode>=1,1,0)) as adoldreq,
          |sum(if(requestmode=1 and processnode>=2,1,0)) as adeffreq,
          |sum(if(requestmode=1 and processnode>=3,1,0)) as adreq,
          |sum(if(iseffective=1 and isbilling=1 and isbid=1 and adorderid !=0,1,0)) as adrtbreq,
          |sum(if(iseffective=1 and isbilling=1 and iswin=1,1,0)) as adsuccreq,
          |sum(if(requestmode=2 and iseffective=1,1,0)) as adshow,
          |sum(if(requestmode=3 and iseffective=1,1,0)) as adclick,
          |sum(if(iseffective=1 and isbilling=1 and iswin=1,adpayment/1000,0)) as adcost,
          |sum(if(iseffective=1 and isbilling=1 and iswin=1,winprice/1000,0)) as adcus
          |from log
          |group by provincename,cityname
        """.stripMargin)

      // JDBC connection settings come from the shared project configuration.
      val props = new Properties()
      props.setProperty("driver", ConfigHelper.driver)
      props.setProperty("user", ConfigHelper.user)
      props.setProperty("password", ConfigHelper.password)
      // Overwrite replaces the target table contents on every run.
      result.write.mode(SaveMode.Overwrite).jdbc(ConfigHelper.url, ConfigHelper.table, props)
    } finally {
      sc.stop()
    }
  }
}
