package com.dmall.scf.action

import com.dmall.scf.SparkAction
import com.dmall.scf.action.HistoryFangKuang180.CLASS_NAME
import com.dmall.scf.dto.SupplierRunFieldValue
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

/**
 * @description
 * 20天内有进货记录天数(19)
 * 生产hive->生产mysql-字段同步(20天内有进货记录天数)
 * @author wangxuexing
 * @date 2019/12/26
 */
object HistoryGoodIn20 extends SparkAction[SupplierRunFieldValue]{
  // Simple class name without the trailing '$' the JVM appends to Scala object classes.
  // stripSuffix only removes the trailing '$' (filter would drop every '$' in the name).
  val CLASS_NAME = this.getClass.getSimpleName().stripSuffix("$")

  /**
   * Computes, per supplier, the number of days with incoming-goods receipts within the
   * 20 days before the reference date (metric field_id = 19), producing one row per
   * supplier in the shape of scfc_supplier_run_field_value.
   *
   * @param spark active SparkSession (must have access to the wumart2dmall Hive database)
   * @param args  job arguments; args(1) is the reference date in yyyy-MM-dd format
   * @return DataFrame with columns (company_id, supplier_id, syn_date, field_id, field_value)
   */
  override def action(spark: SparkSession, args: Array[String]): DataFrame = {
    require(args.length >= 2, "expected reference date (yyyy-MM-dd) as args(1)")
    val currentDate = args(1) // format: yyyy-MM-dd
    // NOTE(review): currentDate is interpolated directly into the SQL text. It comes from
    // the job launcher rather than end users, but validating it as a date before use
    // would guard against malformed queries — TODO confirm upstream validation.
    val sql = s"""SELECT 1 company_id,
                        t.supplier_id supplier_id,
                        '${currentDate}' AS syn_date,
                        19 field_id,
                        sum(t.field_value) field_value
                 FROM
                   (SELECT b.company_id supplier_id,
                           a.receipt_date syn_date,
                           CASE
                               WHEN count(1)>0 THEN 1
                               ELSE 0
                           END field_value
                    FROM
                      (SELECT a.supplier_code,
                              a.receipt_no,
                              min(to_date(a.receipt_date)) receipt_date
                       FROM wumart2dmall.wm_ods_cx_vrm_recipet_header_inc_scf a
                       WHERE a.status=1
                         AND a.order_type=1
                         AND to_date(a.receipt_date) BETWEEN date_sub('${currentDate}',20)
                         AND date_sub('${currentDate}',1)
                         AND a.receipt_date<>'null'
                       GROUP BY a.supplier_code,
                                a.receipt_no) a
                    JOIN wumart2dmall.wm_ods_cx_supplier_card_info b ON a.supplier_code = b.card_code
                    AND b.audit_status = '2'
                    GROUP BY b.company_id,
                             a.receipt_date ) t
                 GROUP BY t.supplier_id"""
    val result = spark.sql(sql)
    // Cache before counting: count() triggers a full job, and without caching the whole
    // aggregation would be recomputed again when the frame is written via saveTable.
    result.cache()
    println(s"Class ${CLASS_NAME} row count: ${result.count()}")
    result
  }

  /**
   * Save mode and destination table for the framework's write step.
   * @return append into scfc_supplier_run_field_value
   */
  override def saveTable: (SaveMode, String) = (SaveMode.Append, "scfc_supplier_run_field_value")
}
