package com.cmnit.analysis.dao

import com.cmnit.analysis.common.TDao
import org.apache.log4j.Logger
import org.apache.spark.sql.DataFrame

class GantryFlowDao extends TDao {
  private val logger: Logger = Logger.getLogger(this.getClass)

  /**
   * Aggregates gantry ETC billing records into per-hour flow statistics.
   *
   * Groups `ods.ods_etc_gantryetcbill` rows by gantry id, media type,
   * vehicle class and vehicle type, counting transactions and summing
   * `payFee`, `discountFee` and `fee`. Only successful trades are kept:
   * `obuTradeResult='0'` for mediaType 1, `tradeResult='0'` for mediaType 2.
   *
   * @param sqlTime  pre-built time-range predicate fragment spliced into the
   *                 WHERE clause. NOTE(review): this is raw SQL injection by
   *                 design — it must come from trusted caller code, never
   *                 from end-user input.
   * @param acctTime accounting hour, expected in yyyyMMddHH form; its first
   *                 8 characters (yyyyMMdd) become the statisDay column
   * @return aggregated flow statistics as a DataFrame
   * @throws IllegalArgumentException if acctTime is shorter than 8 characters
   */
  def gantryFlowData(sqlTime: String, acctTime: String): DataFrame = {
    // Fail fast with a clear message instead of an opaque
    // StringIndexOutOfBoundsException from substring on short input.
    require(
      acctTime.length >= 8,
      s"acctTime must be at least 8 characters (yyyyMMdd...), got: '$acctTime'"
    )
    val statisDay = acctTime.substring(0, 8)

    // Interpolated multi-line string instead of fragile `+` concatenation.
    // The newline after $sqlTime guarantees a separator before `and`: the
    // original emitted "WHERE " + sqlTime + "and ...", which produced
    // invalid SQL whenever sqlTime lacked a trailing space.
    val query =
      s"""SELECT
         |  NVL(gantryId,' ') gantryId,
         |  NVL(mediaType,' ') mediaType,
         |  NVL(vehicleClass,' ') vehicleClass,
         |  NVL(vehicleType,' ') vehicleType,
         |  COUNT(*) countNum,
         |  SUM(payFee) payFee,
         |  SUM(discountFee) discountFee,
         |  SUM(fee) fee,
         |  '$acctTime' as statisHour,
         |  '$statisDay' as statisDay
         |FROM ods.ods_etc_gantryetcbill
         |WHERE $sqlTime
         |  and ((mediaType='1' and obuTradeResult='0') or (mediaType='2' and tradeResult='0'))
         |GROUP BY NVL(gantryId,' '),NVL(mediaType,' '),NVL(vehicleClass,' '),NVL(vehicleType,' ')
         |""".stripMargin

    sparkSQL(query)
  }
}
