package com.cmnit.analysis.dao

import com.cmnit.analysis.common.TDao
import org.apache.log4j.Logger
import org.apache.spark.sql.DataFrame

class ExTollFlowDao extends TDao {
  private val logger: Logger = Logger.getLogger(this.getClass)

  /**
   * Aggregates exit-toll transaction records into hourly statistics, grouped
   * by toll station, media type, vehicle class, vehicle type and pay type.
   *
   * @param sqlTime  pre-built time-range predicate for the WHERE clause
   *                 (assumed to be a complete boolean expression; supplied by
   *                 the caller — NOTE(review): assumed already sanitized, this
   *                 method interpolates it into the SQL verbatim)
   * @param acctTime accounting hour string; its first 8 characters
   *                 (presumably yyyyMMdd — confirm against caller) become the
   *                 statistics day, the full value the statistics hour
   * @return DataFrame with one row per grouping key carrying the transaction
   *         count and the payFee / discountFee / fee sums
   */
  def exTollFlowData(sqlTime: String, acctTime: String): DataFrame = {
    // statisDay is derived via substring(0, 8); fail fast with a clear
    // message instead of an opaque StringIndexOutOfBoundsException.
    require(acctTime.length >= 8,
      s"acctTime must be at least 8 characters, got: '$acctTime'")

    // Triple-quoted interpolated SQL instead of fragile "+"-concatenation.
    // The time predicate is parenthesized with explicit surrounding
    // whitespace: the original code relied on sqlTime ending in a space —
    // without it, sqlTime and the literal "AND" fused into invalid SQL.
    val sql =
      s"""SELECT
         |  NVL(exTollStationId,' ') exTollStationId,
         |  NVL(exTollStation,' ') exTollStation,
         |  NVL(mediaType,' ') mediaType,
         |  NVL(exVehicleClass,' ') exVehicleClass,
         |  NVL(exVehicleType,' ') exVehicleType,
         |  NVL(transPayType,' ') transPayType,
         |  COUNT(*) countNum,
         |  SUM(payFee) payFee,
         |  SUM(discountFee) discountFee,
         |  SUM(getFee(multiProvince, fee, splitProvince)) fee,
         |  '$acctTime' as statisHour,
         |  '${acctTime.substring(0, 8)}' as statisDay
         |FROM ods.ods_etc_tollexbillinfo
         |WHERE ( $sqlTime )
         |  AND transCode IN ('0201','0203','0206')
         |  AND exTollStationId <> 'G0105360010010'
         |  AND getFee(multiProvince, fee, splitProvince) <= 100000000
         |GROUP BY NVL(exTollStationId,' '),NVL(exTollStation,' '),NVL(mediaType,' '),NVL(exVehicleClass,' '),NVL(exVehicleType,' '),NVL(transPayType,' ')
         |""".stripMargin

    // logger was previously declared but never used; surface the generated
    // SQL at debug level to aid troubleshooting of bad sqlTime predicates.
    logger.debug(s"exTollFlowData SQL:\n$sql")
    sparkSQL(sql)
  }
}
