package com.cmnit.analysis.dao

import com.cmnit.analysis.common.TDao
import org.apache.log4j.Logger
import org.apache.spark.sql.DataFrame

class EnTollDao extends TDao {
  private val logger: Logger = Logger.getLogger(this.getClass)

  /**
   * Builds the processed entry-toll result data set.
   *
   * Selects entry billing records from `ods.ods_etc_tollenbillinfo`,
   * replacing NULL string columns with a single space via `nvl`, hashing the
   * plate with the registered UDF `getVehiclePlateMD5(plate + "_" + color)`,
   * and deriving two partition-style columns from `acctTime`. Only records
   * with `transCode` in ('0101','0103','0106') are kept.
   *
   * @param sqlTime  raw SQL boolean predicate appended after WHERE to filter
   *                 by time (e.g. "enTime >= '...'"); it is injected verbatim
   *                 into the query, so callers must pass trusted, well-formed
   *                 SQL only — NOTE(review): not parameterized, confirm all
   *                 call sites are internal
   * @param acctTime accounting hour stamp; presumably yyyyMMddHH — must be at
   *                 least 8 characters, since the first 8 (yyyyMMdd) become
   *                 the `statisday` column and the full value `statishour`
   * @return DataFrame with the selected and derived columns
   * @throws IllegalArgumentException if `acctTime` is shorter than 8 chars
   */
  def enTollData(sqlTime: String, acctTime: String): DataFrame = {
    // Fail fast with a clear message instead of an opaque
    // StringIndexOutOfBoundsException from substring below.
    require(acctTime.length >= 8, s"acctTime must have at least 8 characters, got: '$acctTime'")

    // Triple-quoted interpolated SQL: each clause sits on its own line, so the
    // time predicate can never fuse with the following AND (the original
    // concatenation produced "...${sqlTime}AND transCode..." when the caller
    // omitted a trailing space).
    sparkSQL(
      s"""SELECT
         |  nvl(id,' ') id,
         |  nvl(passId,' ') passId,
         |  nvl(vlpc,' ') vlpc,
         |  getVehiclePlateMD5(concat(vlp,'_',vlpc)) vlp,
         |  nvl(enTollStationId,' ') enTollStationId,
         |  nvl(enTime,' ') enTime,
         |  nvl(vehicleType,' ') vehicleType,
         |  nvl(vehicleClass,' ') vehicleClass,
         |  nvl(mediaType,' ') mediaType,
         |  nvl(vCount,' ') vCount,
         |  nvl(laneType,' ') laneType,
         |  nvl(direction,' ') direction,
         |  nvl(enWeight,' ') enWeight,
         |  nvl(axisInfo,' ') axisInfo,
         |  '$acctTime' as statishour,
         |  '${acctTime.substring(0, 8)}' as statisday
         |FROM ods.ods_etc_tollenbillinfo
         |WHERE $sqlTime
         |  AND transCode in ('0101','0103','0106')
         |""".stripMargin)
  }
}
