package report

import java.util.Properties

import Configer.Configer
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{SQLContext, SaveMode}

// Regional (province/city) distribution report.
// Reads ad-request logs from parquet, aggregates request/bid/impression/click
// counters and spend per (provincename, cityname) with Spark SQL, and writes
// the result to a JDBC table.
object AreaAnalysisSQL {
  def main(args: Array[String]): Unit = {
    // Input path and output table are overridable from the command line;
    // defaults preserve the previous hard-coded values for backward compatibility.
    val inputPath = if (args.length > 0) args(0) else "E:\\小牛项目\\DMP广告项目34期\\资料PDF\\parquet"
    val outputTable = if (args.length > 1) args(1) else "Area34"

    val conf = new SparkConf().setAppName(s"${this.getClass.getName}")
      .setMaster("local[*]").set("spark.serializer", Configer.serializer)
    val sc = new SparkContext(conf)
    try {
      val sqlContext = new SQLContext(sc)
      // Load the ad-request logs.
      val dataFrame = sqlContext.read.parquet(inputPath)
      dataFrame.registerTempTable("logs")
      // All conditional counters use the same if(cond,1,0) form (the original query
      // mixed a custom UDF, CASE WHEN, and if() — all equivalent). The previously
      // registered one-off "myif" UDF is therefore no longer needed.
      // NOTE: column aliases (including the historical "adsucess" spelling) are kept
      // unchanged so consumers of the JDBC table are unaffected.
      val result = sqlContext.sql(
        """
          |select provincename,cityname,
          |sum(if(requestmode=1 and processnode>=1,1,0)) as allreq,
          |sum(if(requestmode=1 and processnode>=2,1,0)) as effreq,
          |sum(if(requestmode=1 and processnode=3,1,0)) as adreq,
          |sum(if(iseffective=1 and isbilling=1 and isbid=1 and adorderid!=0,1,0)) as adjoin,
          |sum(if(iseffective=1 and isbilling=1 and iswin=1,1,0)) as adsucess,
          |sum(if(requestmode=2 and iseffective=1,1,0)) as adshow,
          |sum(if(requestmode=3 and iseffective=1,1,0)) as adclick,
          |sum(if(iseffective=1 and isbilling=1 and iswin=1,adpayment/1000,0)) as adpay,
          |sum(if(iseffective=1 and isbilling=1 and iswin=1,winprice/1000,0)) as adwin
          |from logs
          |group by provincename,cityname
        """.stripMargin)

      // Persist the aggregated report over JDBC, replacing any previous run.
      val props = new Properties()
      props.setProperty("driver", Configer.driver)
      props.setProperty("user", Configer.user)
      props.setProperty("password", Configer.password)
      result.write.mode(SaveMode.Overwrite).jdbc(Configer.url, outputTable, props)
    } finally {
      // Always release the SparkContext, even if reading/writing fails.
      sc.stop()
    }
  }
}
