package cn.doit.report

import cn.doit.bean.AreaReportResult
import cn.doit.common.{DolphinAppComm, DolphinKPI}
import cn.doit.config.DolphinConfig
import org.apache.spark.sql.{SQLContext, SaveMode}


object TerritoryCountCore {

  /**
   * Entry point of the territory (area) report job.
   *
   * Reads parquet log records from the single `<inputpath>` argument,
   * computes the DolphinKPI metric vector for each row, sums the vectors
   * per (day, province, city) key, and appends the aggregated rows to the
   * `my_dolphin_xiaoli` JDBC table configured in [[DolphinConfig]].
   *
   * Exits with -1 after printing usage when the argument count is wrong.
   */
  def main(args: Array[String]): Unit = {
    if (args.length != 1) {
      // Fixed: usage previously advertised "TerritoryCount", which does not
      // match this object's actual class name.
      println(
        """
          |cn.doit.report.TerritoryCountCore
          |<inputpath>
        """.stripMargin)
      sys.exit(-1)
    }
    val Array(inputpath) = args
    val sc = DolphinAppComm.creatSparkContext("cn.doit.report.TerritoryCount")
    try {
      val sqlc = new SQLContext(sc)
      val dataFrame = sqlc.read.parquet(inputpath)

      import sqlc.implicits._
      dataFrame.map(row => {
        // Key by (day, province, city); value is the per-row KPI vector.
        // Assumes `requestdate` starts with a 10-char "yyyy-MM-dd" date —
        // TODO(review): confirm against the parquet schema; a shorter value
        // would throw StringIndexOutOfBoundsException here.
        val day = row.getAs[String]("requestdate").substring(0, 10)
        val proName = row.getAs[String]("provincename")
        val cityName = row.getAs[String]("cityname")

        ((day, proName, cityName), DolphinKPI.caculateKpi(row))
      }).reduceByKey((a, b) => {
        // Element-wise sum of the two KPI vectors sharing the same key.
        a.zip(b).map(tp => tp._1 + tp._2)
      }).map {
        // NOTE(review): this pattern requires caculateKpi to yield exactly
        // 9 metrics; any other length raises a MatchError at runtime.
        case ((day, province, city), List(rawReq, effReq, adReq, isRTB, isuccRTB, adShow, adClik, expense, cost))
        =>
          AreaReportResult(day, province, city, rawReq.toInt, effReq.toInt, adReq.toInt, isRTB.toInt, isuccRTB.toInt, adShow.toInt, adClik.toInt, expense, cost)
      }
        .toDF().write.mode(SaveMode.Append).jdbc(DolphinConfig._url, "my_dolphin_xiaoli", DolphinConfig.props)
    } finally {
      // Fixed: stop the SparkContext even when the job fails, so the
      // application releases cluster resources instead of leaking them.
      sc.stop()
    }
  }
}
