package cn.doit.report

import cn.doit.common.DolphinAppComm
import cn.doit.config.DolphinConfig
import org.apache.spark.sql.{DataFrame, SQLContext, SaveMode}


object TerritoryCountSQL {

  /**
   * Aggregates ad-exchange request logs by (province, city, day) and writes the
   * report both as JSON files and into a JDBC table (`r_mytable_2`, append mode).
   *
   * Usage: cn.doit.report.TerritoryCountSQL <inputpath> [jsonOutputPath]
   *   - inputpath:      parquet input path of the request log
   *   - jsonOutputPath: optional JSON output path; defaults to "json_2"
   *                     (the previously hard-coded value, so existing callers
   *                     are unaffected)
   *
   * Exits with -1 when the argument count is wrong.
   */
  def main(args: Array[String]): Unit = {
    if (args.length < 1 || args.length > 2) {
      println(
        """
          |cn.doit.report.TerritoryCount
          |<inputpath> [jsonOutputPath]
        """.stripMargin)
      sys.exit(-1)
    }
    val inputpath = args(0)
    // Backward-compatible generalization: an optional second argument overrides
    // the JSON output location instead of always writing to "json_2".
    val jsonOutputPath = if (args.length == 2) args(1) else "json_2"

    val sc = DolphinAppComm.creatSparkContext("cn.doit.report.TerritoryCount")
    try {
      val sqlc = new SQLContext(sc)

      val file: DataFrame = sqlc.read.parquet(inputpath)
      file.registerTempTable("dolphin_table_2")

      // Spark SQL supports user-defined functions; sheepif(cond, a, b) is
      // equivalent to the built-in if(cond, a, b) and is kept to demonstrate UDFs.
      sqlc.udf.register("sheepif", (bool: Boolean, right: Double, wrong: Double) => if (bool) right else wrong)

      /*  |  ,valid_request,advertising_request,bidding_num,bid_success_num,num,click,expense,costing*/
      val result = sqlc.sql(
        """
          |select provincename, cityname, substring(requestdate, 0, 10) day,
          |sum(case when requestmode=1 and processnode>=1 then 1 else 0 end) rawReq,
          |sum(case when requestmode=1 and processnode>=2 then 1 else 0 end) effReq,
          |sum(case when requestmode=1 and processnode=3 then 1 else 0 end) adReq,
          |
          |sum(sheepif(iseffective=1 and isbilling=1 and isbid=1 and adorderid!=0, 1, 0)) isRTB,
          |sum(if(iseffective=1 and isbilling=1 and iswin=1, 1, 0)) succRTB,
          |
          |sum(if(requestmode=2 and iseffective=1, 1, 0)) adShow,
          |sum(if(requestmode=3 and iseffective=1, 1, 0)) adClik,
          |
          |sum(if(iseffective=1 and isbilling=1 and iswin=1, winprice/1000, 0)) expense,
          |sum(if(iseffective=1 and isbilling=1 and iswin=1, adpayment/1000, 0)) cost
          |
          |from dolphin_table_2
          |group by provincename, cityname, substring(requestdate, 0, 10)
        """.stripMargin)

      // Overwrite so re-running the job does not crash with "path already exists"
      // (the previous default SaveMode.ErrorIfExists made the job single-shot).
      result.write.mode(SaveMode.Overwrite).json(jsonOutputPath)
      result.write.mode(SaveMode.Append).jdbc(DolphinConfig._url, "r_mytable_2", DolphinConfig.props)
    } finally {
      // Always release the SparkContext, even when a read/write stage fails.
      sc.stop()
    }
  }
}
