package com.dmall.scf.action

import com.dmall.scf.SparkAction
import com.dmall.scf.action.HistoryFangKuang180.CLASS_NAME
import com.dmall.scf.dto.SupplierRunFieldValue
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

/**
 * @description
 *  Sales cost over the last 90 days (T-1 through T-90), field id 25.
 *  Syncs the computed field from the production Hive warehouse into the
 *  production MySQL field-value table (90-day sales cost).
 * @author wangxuexing
 * @date 2019/12/26
 */
object HistorySaleCost90 extends SparkAction[SupplierRunFieldValue] {
  // Runtime class simple name with the trailing '$' (appended by Scala for
  // singleton objects) removed, e.g. "HistorySaleCost90". Shadows the
  // identically-named import from HistoryFangKuang180.
  val CLASS_NAME = this.getClass.getSimpleName().filterNot(_ == '$')

  /**
   * Builds the per-supplier 90-day sales-cost aggregation.
   *
   * @param spark active Spark session with access to the Hive warehouse
   * @param args  job arguments; args(1) is the reference date, format yyyy-MM-dd
   * @return one row per supplier:
   *         (company_id, supplier_id, syn_date, field_id = 25, field_value)
   */
  override def action(spark: SparkSession, args: Array[String]): DataFrame = {
    val currentDate = args(1) // expected format: yyyy-MM-dd
    // Window is [T-90, T-1] relative to currentDate (dt stored as yyyyMMdd);
    // restricted to 10-character vendor numbers (lifnr) whose last 6 digits
    // match an audited supplier card (audit_status = '2').
    val sql = s"""SELECT 1 company_id,
                         b.company_id supplier_id,
                         '${currentDate}' AS syn_date,
                         25 field_id,
                         sum(a.salecost) field_value
                  FROM wumart2dmall.wm_dw_site_merch_sale_day a
                  JOIN wumart2dmall.wm_ods_cx_supplier_card_info b ON substr(a.lifnr,5) = b.card_code
                  AND b.audit_status = '2'
                  WHERE a.dt BETWEEN date_format(date_sub(to_date('${currentDate}'),90),'yyyyMMdd')
                    AND date_format(date_sub(to_date('${currentDate}'), 1), 'yyyyMMdd')
                    AND length(a.lifnr)=10
                  GROUP BY b.company_id"""
    val frame = spark.sql(sql)
    // NOTE(review): count() fires an extra Spark action purely for this log line.
    println(s"Class $CLASS_NAME row count: ${frame.count()}")
    frame
  }

  /**
   * Destination for the computed field values.
   *
   * @return save mode (Append) and the target table name
   */
  override def saveTable: (SaveMode, String) = (SaveMode.Append, "scfc_supplier_run_field_value")
}
