package com.cmnit.analysis.dao

import com.cmnit.analysis.common.{TDao, TSql}
import org.apache.log4j.Logger
import org.apache.spark.sql.DataFrame

class GbTollPlazaDicDao extends TSql with TDao {
  private val logger: Logger = Logger.getLogger(this.getClass)

  /**
   * Loads the national-standard (GB) toll plaza dictionary from the
   * dimension table `DIM.DIM_TBL_GbTollPlazaDic` by delegating the
   * assembled SELECT statement to the inherited `sparkSQL` helper.
   *
   * NOTE(review): the method name keeps its original UpperCamelCase
   * spelling for source compatibility with existing callers; new methods
   * should use lowerCamelCase.
   *
   * @return a [[DataFrame]] containing the selected dictionary columns
   */
  def GbTollPlazaDicData(): DataFrame = {
    // Columns are listed explicitly (rather than SELECT *) so that schema
    // changes in the dimension table cannot silently alter the output.
    val columns = Seq(
      "id",
      "name",
      "plazatype",
      "stakenum",
      "lat",
      "lng",
      "etclanecount",
      "mtclanecount",
      "mixlanecount",
      "usestatus",
      "operation",
      "recordgentime",
      "responsecode",
      "responseinfo",
      "receivetime",
      "protime",
      "batchfilename",
      "status",
      "ifcontainlimitweight",
      "limitweightpos",
      "lanecount",
      "passtype",
      "isbearinggantry",
      "bearinggantryid"
    )
    // mkString reproduces the original SQL text byte-for-byte:
    // "SELECT <col, col, ...> FROM DIM.DIM_TBL_GbTollPlazaDic "
    sparkSQL(columns.mkString("SELECT ", ", ", " FROM DIM.DIM_TBL_GbTollPlazaDic "))
  }
}
