package region_distribution

import java.util.Properties

import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by yangqiyuan on 2018/3/28.
  */
object RegionDistributionRdd {

  /**
    * Local Spark batch job: reads ad-event rows from a parquet file
    * (relative path "parquet" — resolved against the working directory),
    * aggregates request/bid/show/click metrics per (provincename, cityname),
    * prints the first 100 rows, and writes the result to MySQL table `t_show`.
    *
    * @param args unused; all inputs/outputs are hard-coded for local runs.
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      // Use this object's own class name; the original referenced the
      // unrelated RegionDistribution object, mislabeling the app in the UI.
      .setAppName(this.getClass.getSimpleName)
      .setMaster("local[*]")
    val sc: SparkContext = new SparkContext(conf)
    try {
      val sqlContext: SQLContext = new SQLContext(sc)

      // One row per ad event; expose it to SQL as a temp table.
      val parquet: DataFrame = sqlContext.read.parquet("parquet")
      parquet.registerTempTable("t_show")

      // count(case ... then 1 else null end) counts matching rows.
      // ad_cost / ad_consume must SUM the monetary amounts (winprice and
      // adpayment are in thousandths); the original used count(), which
      // made them duplicates of sucess_bid_count.
      val result: DataFrame = sqlContext.sql(
        """
          |select provincename,cityname,
          |count(case when requestmode=1 and processnode>=1 then 1 else null end)as init_request,
          |count(case when requestmode=1 and processnode>=2 then 1 else null end)as effective_request,
          |count(case when requestmode=1 and processnode=3 then 1 else null end)as ad_request,
          |count(case when iseffective=1 and isbilling=1 and isbid=1 and adorderid!=1 then 1 else null end)as bid_count,
          |count(case when iseffective=1 and isbilling=1 and iswin=1 then 1 else null end)as sucess_bid_count,
          |count(case when requestmode=2 and iseffective=1 then 1 else null end)as show_count,
          |count(case when requestmode=3 and iseffective=1 then 1 else null end)as click_count,
          |sum(case when iseffective=1 and isbilling=1 and iswin=1 then winprice/1000 else null end)as ad_cost,
          |sum(case when iseffective=1 and isbilling=1 and iswin=1 then adpayment/1000 else null end)as ad_consume
          |from
          |t_show group by provincename,cityname
          |
        """.stripMargin)
      result.show(100)

      val properties: Properties = new Properties()
      // JDBC expects the key "user"; the original's "username" is silently
      // ignored by MySQL Connector/J, so the connection ran unauthenticated.
      properties.setProperty("user", "root")
      // NOTE(review): empty root password — fine for a local dev MySQL only.
      properties.setProperty("password", "")
      result.write.jdbc("jdbc:mysql://localhost:3306/test?characterEncoding=utf-8", "t_show", properties)
    } finally {
      // Original leaked the SparkContext; always release local resources.
      sc.stop()
    }
  }
}
