package com.cmnit.analysis.dao

import com.cmnit.analysis.common.{TDao, TSql}
import org.apache.log4j.Logger
import org.apache.spark.sql.DataFrame

/**
 * DAO for the national-standard (GB) toll-station dictionary dimension table.
 *
 * Reads `DIM.DIM_TBL_GbTollStationDic` via the inherited `sparkSQL` helper
 * (from [[TSql]]) and exposes the result as a Spark [[DataFrame]].
 */
class GbTollStationDicDao extends TSql with TDao {
  private val logger: Logger = Logger.getLogger(this.getClass)

  // Columns projected from DIM.DIM_TBL_GbTollStationDic, in table order.
  // Kept as a Seq so the query stays readable and the column list is easy
  // to maintain (no hand-built "col, " concatenation fragments).
  private val columns: Seq[String] = Seq(
    "id",
    "stationid",
    "name",
    "type",
    "tollplazacount",
    "neighborid",
    "operation",
    "recordgentime",
    "stationhex",
    "status",
    "responsecode",
    "responseinfo",
    "receivetime",
    "protime",
    "batchfilename",
    "linetype",
    "operators",
    "datamergepoint",
    "imei",
    "ip",
    "snmpversion",
    "snmpport",
    "community",
    "securityname",
    "securitylevel",
    "authentication",
    "authkey",
    "encryption",
    "secretkey",
    "regionalismcode",
    "usestatus",
    "realtype",
    "regionname",
    "countryname",
    "regionlismcode",
    "servermanuid",
    "serversysname",
    "serversysver",
    "serverdatever",
    "agencygantryids"
  )

  /**
   * Loads the GB toll-station dictionary (result/processed data).
   *
   * The generated SQL is identical to the previous hand-concatenated query,
   * including the trailing space after the table name.
   *
   * @return a [[DataFrame]] with one row per toll station from
   *         `DIM.DIM_TBL_GbTollStationDic`.
   */
  def GbTollStationDicData(): DataFrame = {
    val query =
      s"SELECT ${columns.mkString(", ")} FROM DIM.DIM_TBL_GbTollStationDic "
    // Surface the exact statement sent to Spark for troubleshooting.
    logger.debug(s"Executing query: $query")
    sparkSQL(query)
  }
}
