package com.dmall.scf.action

import com.dmall.scf.SparkAction
import com.dmall.scf.action.HistoryFangKuang180.CLASS_NAME
import com.dmall.scf.dto.SupplierRunFieldValue
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

/**
 * @description
 * Number of days with sales records within the last 15 days (field id 22).
 * Production Hive -> production MySQL field sync (days with sales records in the last 15 days).
 * @author wangxuexing
 * @date 2019/12/26
 */
object HistorySaleDays15 extends SparkAction[SupplierRunFieldValue] {
  // Simple name of this object with the trailing '$' (Scala object class suffix) stripped;
  // used purely for log output below.
  val CLASS_NAME = this.getClass.getSimpleName.filterNot(_ == '$')

  /**
   * Computes, per supplier, the number of days with at least one sales record
   * in the 15-day window [currentDate - 15, currentDate - 1], and shapes the
   * result as rows for field id 22 of scfc_supplier_run_field_value.
   *
   * @param spark the active SparkSession used to run the Hive query
   * @param args  job arguments; args(1) is the snapshot date in yyyy-MM-dd format
   * @return DataFrame with columns (company_id, supplier_id, syn_date, field_id, field_value)
   */
  override def action(spark: SparkSession, args: Array[String]): DataFrame = {
    // Snapshot date driving the SQL date window; expected format: yyyy-MM-dd.
    val currentDate = args(1)
    // Inner query marks each (supplier, day) pair with 1 when any sale exists;
    // the outer sum therefore counts distinct days with sales per supplier.
    val query = s"""SELECT 1 company_id,
                           t.supplier_id,
                           '${currentDate}' AS syn_date,
                           22 field_id,
                           sum(t.days) field_value
                    FROM
                      (SELECT b.company_id supplier_id,
                              a.dt,
                              CASE
                                  WHEN count(1)>0 THEN 1
                                  ELSE 0
                              END days
                       FROM wumart2dmall.wm_dw_site_merch_sale_day a
                       JOIN wumart2dmall.wm_ods_cx_supplier_card_info b ON substr(a.lifnr,5) = b.card_code
                       AND b.audit_status = '2'
                       WHERE a.dt BETWEEN date_format(date_sub(to_date('${currentDate}'), 15),'yyyyMMdd')
                       AND date_format(date_sub(to_date('${currentDate}'), 1),'yyyyMMdd')
                         AND length(lifnr)=10
                       GROUP BY b.company_id,
                                dt) t
                    GROUP BY t.supplier_id"""
    val frame = spark.sql(query)
    // NOTE: count() triggers a full evaluation of the query just for this log line.
    println(s"Class ${CLASS_NAME} row count: ${frame.count()}")
    frame
  }

  /**
   * Save mode and target table for the job output.
   * @return (SaveMode.Append, target MySQL table name)
   */
  override def saveTable: (SaveMode, String) = (SaveMode.Append, "scfc_supplier_run_field_value")
}
