package com.dmall.scf.action

import com.dmall.scf.SparkAction
import com.dmall.scf.dto.SupplierRunFieldValue
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

/**
 * @description
 * Loan disbursement amount over the last 180 days (T-1 through T-180) (field id 39).
 * Syncs the computed field from production Hive to production MySQL
 * (近180天放款金额 — "loan amount disbursed in the last 180 days").
 * @author wangxuexing
 * @date 2019/12/26
 */
object HistoryFangKuang180 extends SparkAction[SupplierRunFieldValue] {
  // Simple name of this object with the trailing '$' (Scala object marker) stripped;
  // used only for log output below.
  val CLASS_NAME: String = this.getClass.getSimpleName.filterNot(_ == '$')

  /**
   * Computes, per supplier, the total loan amount disbursed in the window
   * [currentDate - 180 days, currentDate - 1 day], tagged as field_id 39.
   *
   * @param spark active Spark session used to run the Hive query
   * @param args  job arguments; args(1) must be the reference date in yyyy-MM-dd format
   * @return one row per supplier: (company_id=1, supplier_id, syn_date, field_id=39, field_value)
   * @throws IllegalArgumentException if the reference date argument is missing
   */
  override def action(spark: SparkSession, args: Array[String]): DataFrame = {
    // Fail fast with a clear message instead of an opaque ArrayIndexOutOfBoundsException.
    require(args.length >= 2, s"$CLASS_NAME requires args(1) = reference date (yyyy-MM-dd)")
    val currentDate = args(1) // format: yyyy-MM-dd
    // NOTE(review): currentDate is interpolated directly into the SQL text; it comes
    // from the job scheduler's arguments, but malformed input would break the query.
    // Only card-verified suppliers (audit_status = '2') are included in the join.
    val sql = s"""SELECT 1 company_id,
                                   b.company_id supplier_id,
                                   '${currentDate}' AS syn_date,
                                   39 field_id,
                                   sum(a.fafangje) field_value
                            FROM wumart2dmall.wm_ods_jrbl_loan_dkzhxx a
                            JOIN wumart2dmall.wm_ods_cx_supplier_card_info b ON a.gshkahao = b.card_code
                            AND b.audit_status = '2'
                            WHERE to_date(a.gxinshij) BETWEEN date_sub('${currentDate}',180)
                            AND date_sub('${currentDate}',1)
                            GROUP BY b.company_id"""
    val result = spark.sql(sql)
    // NOTE(review): count() triggers a full Spark job here, and the returned DataFrame
    // will be recomputed when it is written out by the framework — consider caching or
    // logging the count after the write if this becomes a bottleneck.
    println(s"Class ${CLASS_NAME} row count: ${result.count()}")
    result
  }

  /**
   * Target save mode and table name for the framework's write step:
   * append the computed rows to scfc_supplier_run_field_value.
   */
  override def saveTable: (SaveMode, String) = (SaveMode.Append, "scfc_supplier_run_field_value")
}
