package com.cmnit.analysis.dao

import com.cmnit.analysis.common.TDao
import org.apache.log4j.Logger
import org.apache.spark.sql.DataFrame

class EnTollFlowDao extends TDao {
  private val logger: Logger = Logger.getLogger(this.getClass)

  /**
   * Aggregates entry-toll billing records into per-hour flow counts.
   *
   * Groups rows of `ods.ods_etc_tollenbillinfo` (restricted to transCode
   * '0101'/'0103'/'0106') by entry station, media type and vehicle
   * class/type. Each dimension is NULL-filled with a single space via NVL
   * so NULL values collapse into one group instead of being dropped.
   *
   * @param sqlTime  pre-built SQL predicate restricting the time window,
   *                 injected verbatim into the WHERE clause. NOTE(review):
   *                 this is string-built SQL — callers must only pass
   *                 trusted, internally generated predicates (not
   *                 parameterized, SQL-injection risk otherwise).
   * @param acctTime statistics hour, assumed yyyyMMddHH — its first 8
   *                 characters (yyyyMMdd) become the statisDay column;
   *                 throws StringIndexOutOfBoundsException if shorter
   * @return DataFrame with one row per dimension combination plus its
   *         countNum, statisHour and statisDay columns
   */
  def enTollFlowData(sqlTime: String, acctTime: String): DataFrame = {
    // yyyyMMdd day key derived from the hour key.
    val statisDay = acctTime.substring(0, 8)
    // BUGFIX: the original concatenated "WHERE " + sqlTime + "AND ...",
    // leaving no space between the predicate and AND — any sqlTime without
    // a trailing space produced invalid SQL (e.g. "...'2023'AND transCode").
    // Framing sqlTime with explicit spaces makes the query whitespace-safe.
    sparkSQL(
      s"""SELECT
         |  NVL(enTollStationId,' ') enTollStationId,
         |  NVL(enTollStation,' ') enTollStation,
         |  NVL(mediaType,' ') mediaType,
         |  NVL(vehicleClass,' ') vehicleClass,
         |  NVL(vehicleType,' ') vehicleType,
         |  COUNT(*) countNum,
         |  '$acctTime' as statisHour,
         |  '$statisDay' as statisDay
         |FROM ods.ods_etc_tollenbillinfo
         |WHERE $sqlTime AND transCode IN ('0101','0103','0106')
         |GROUP BY NVL(enTollStationId,' '),NVL(enTollStation,' '),NVL(mediaType,' '),NVL(vehicleClass,' '),NVL(vehicleType,' ')
         |""".stripMargin
    )
  }
}
