package com.cmnit.analysis.dao

import com.cmnit.analysis.common.TDao
import org.apache.log4j.Logger
import org.apache.spark.sql.DataFrame

class ExTollDao extends TDao {
  private val logger: Logger = Logger.getLogger(this.getClass)

  /**
   * Builds the processed exit-toll result set from `ods.ods_etc_tollexbillinfo`.
   *
   * Every source column is padded to a single space via `nvl` so downstream
   * consumers never see NULLs; the licence plate is anonymised through the
   * `getVehiclePlateMD5` UDF (registered elsewhere — presumably in [[TDao]],
   * TODO confirm).
   *
   * @param sqlTime  pre-built time-range predicate spliced verbatim into the
   *                 WHERE clause. NOTE(review): this is string-built SQL — the
   *                 value must come from trusted configuration, never from
   *                 user input (injection risk).
   * @param acctTime accounting hour stamp; must be at least 8 characters
   *                 (yyyyMMdd prefix) — emitted as `statishour`, with its
   *                 first 8 characters emitted as `statisday`
   * @return the selected and renamed columns as a DataFrame
   * @throws IllegalArgumentException if `acctTime` is shorter than 8 characters
   */
  def exTollData(sqlTime: String, acctTime: String): DataFrame = {
    // Fail fast with a clear message instead of letting substring(0, 8)
    // throw a bare StringIndexOutOfBoundsException mid-query-build.
    require(acctTime.length >= 8,
      s"acctTime must be at least 8 characters (yyyyMMdd...), got '$acctTime'")

    // Interpolated triple-quoted string instead of ~35 `+` concatenations.
    // Fixes from the original:
    //  - the duplicated `and fee<=100000000` predicate is emitted only once;
    //  - the transcode OR-chain is an equivalent IN list;
    //  - newlines separate `where $sqlTime` from the next predicate, so the
    //    query no longer silently breaks if sqlTime lacks a trailing space.
    val query =
      s"""select
         |  nvl(id,' ') id,
         |  nvl(passId,' ') passId,
         |  nvl(enVlpc,' ') enVlpc,
         |  getVehiclePlateMD5(concat(enVlp,'_',enVlpc)) enVlp,
         |  nvl(exTollStation,' ') exTollStation,
         |  nvl(exTime,' ') exTime,
         |  nvl(enTollStation,' ') enTollStation,
         |  nvl(enTime,' ') enTime,
         |  nvl(enVehicleType,' ') enVehicleType,
         |  nvl(exVehicleType,' ') exVehicleType,
         |  nvl(payType,' ') payType,
         |  nvl(fee,' ') fee,
         |  nvl(discountFee,' ') discountFee,
         |  nvl(payFee,' ') payFee,
         |  nvl(mediaType,' ') mediaType,
         |  nvl(laneType,' ') laneType,
         |  nvl(enVehicleClass,' ') enVehicleClass,
         |  nvl(realDistance,' ') realDistance,
         |  nvl(vehicleSign,' ') vehicleSign,
         |  nvl(exWeight,' ') exWeight,
         |  nvl(enWeight,' ') enWeight,
         |  nvl(axisInfo,' ') axisInfo,
         |  nvl(limitWeight,' ') limitWeight,
         |  nvl(overWeightRate,' ') overWeightRate,
         |  '$acctTime' as statishour,
         |  '${acctTime.substring(0, 8)}' as statisday
         |from ods.ods_etc_tollexbillinfo
         |where $sqlTime
         |and transcode in ('0201','0203','0205','0206')
         |and fee<=100000000
         |and extollstationid<>'G0105360010010'
         |""".stripMargin

    logger.debug(s"exTollData query: $query")
    sparkSQL(query)
  }
}
