package com.cmnit.analysis.dao

import com.cmnit.analysis.common.{TDao, TSql}
import org.apache.log4j.Logger
import org.apache.spark.sql.DataFrame

class GbTollLaneDicDao extends TSql with TDao {
  // NOTE(review): logger is never used in this class as visible here; kept for
  // consistency with the apparent DAO template — confirm before removing.
  private val logger: Logger = Logger.getLogger(this.getClass)

  /** Columns of `DIM.DIM_TBL_GbTollLaneDic`, in projection order. */
  private val columns: Seq[String] = Seq(
    "id", "stationid", "laneid", "lanetype", "greentraffic", "tidaltime",
    "starttime", "endtime", "status", "lanehex", "rsumanuid", "rsumodel",
    "rsuid", "rsuverid", "operation", "recordgentime", "usestatus",
    "responsecode", "responseinfo", "receivetime", "protime", "batchfilename",
    "entryexittype", "railingpos", "ifcontainlimitweight", "vplrmanuid",
    "reallanenum"
  )

  /**
   * Loads the toll-lane dictionary data (processed result data).
   *
   * Executes a full-table SELECT against `DIM.DIM_TBL_GbTollLaneDic` through
   * the inherited `sparkSQL` helper. The generated SQL text is byte-identical
   * to the previous hand-concatenated version; building it from `columns` via
   * `mkString` removes the error-prone chain of `"col, " +` literals.
   *
   * @return a DataFrame with one row per toll-lane dictionary record
   */
  def GbTollLaneDicData(): DataFrame = {
    sparkSQL(s"SELECT ${columns.mkString(", ")} FROM DIM.DIM_TBL_GbTollLaneDic ")
  }
}
