package com.cmnit.analysis.dao

import com.cmnit.analysis.common.{TDao, TSql}
import org.apache.log4j.Logger
import org.apache.spark.sql.DataFrame

class CardIssueDao extends TSql with TDao {
  private val logger: Logger = Logger.getLogger(this.getClass)

  /**
   * Aggregates card-issue operation counts for a single statistics hour.
   *
   * Counts rows in `issue.tbl_posoperatewaste_new` (status = '1') by operation
   * type: opertype '9' = store card, '82' = charge card, '65' = OBU device.
   *
   * @param acctTime the statistics-hour label, echoed back verbatim as the
   *                 `statisHour` column
   * @param sqlTime  a caller-supplied SQL predicate restricting the time range
   *                 (e.g. `operatetime >= '...'`); interpolated verbatim
   * @return a single-row DataFrame with columns
   *         statisHour, storeCardNum, chargeCardNum, obuNum
   */
  def cardIssueData(acctTime: String, sqlTime: String): DataFrame = {
    // NOTE(review): acctTime and sqlTime are spliced directly into the SQL
    // text with no escaping — callers must pass only trusted values (this
    // sparkSQL path has no bind-parameter support). Verify call sites.
    //
    // The interpolated form below also fixes a latent bug in the old `+`
    // concatenation: if sqlTime lacked a trailing space, the query fused
    // into `...<predicate>AND status = '1'`, which is invalid SQL. Newlines
    // now guarantee whitespace around the injected fragment.
    sparkSQL(
      s"""SELECT
         |  '$acctTime' as statisHour,
         |  NVL(SUM(CASE WHEN opertype = '9'  THEN 1 ELSE 0 END), 0) storeCardNum,
         |  NVL(SUM(CASE WHEN opertype = '82' THEN 1 ELSE 0 END), 0) chargeCardNum,
         |  NVL(SUM(CASE WHEN opertype = '65' THEN 1 ELSE 0 END), 0) obuNum
         |FROM issue.tbl_posoperatewaste_new
         |WHERE $sqlTime
         |  AND status = '1'
         |  AND opertype IN ('9','65','82')
         |""".stripMargin)
  }
}
