package com.cmnit.analysis.dao

import com.cmnit.analysis.common.{TDao, TSql}
import org.apache.log4j.Logger
import org.apache.spark.sql.DataFrame

class GbSectionOwnerDicDao extends TSql with TDao {
  // Kept for parity with sibling DAOs; currently unused in this class.
  private val logger: Logger = Logger.getLogger(this.getClass)

  /**
   * Loads the processed section-owner dictionary records.
   *
   * Selects all dictionary columns from the dimension table
   * `DIM.DIM_TBL_GbSectionOwnerDic` via the inherited `sparkSQL` helper.
   *
   * @return a [[DataFrame]] with the owner-dictionary columns
   *         (id, ownerid, name, contact, ... protime)
   */
  def gbSectionOwnerDicData(): DataFrame = {
    // Triple-quoted literal with stripMargin instead of a chain of `+`
    // concatenations: one readable literal, same query text semantics.
    sparkSQL(
      """SELECT
        |  id,
        |  ownerid,
        |  name,
        |  contact,
        |  tel,
        |  address,
        |  bank,
        |  bankaddr,
        |  bankaccount,
        |  taxpayercode,
        |  creditcode,
        |  operation,
        |  recordgentime,
        |  status,
        |  batchfilename,
        |  responsecode,
        |  responseinfo,
        |  receivetime,
        |  protime
        |FROM
        |  DIM.DIM_TBL_GbSectionOwnerDic
        |""".stripMargin)
  }
}
