package com.bbmall.dwd;

import com.bbmall.SparkSessionUtil;
import org.apache.spark.sql.SparkSession;


/**
 * Inventory fact table loader (ODS -> DWD).
 * <p>
 * Because the warehouse data volume is small, this job materializes a full daily
 * snapshot fact table rather than doing incremental processing.
 * (原注释：库存事实表，因为仓库数据量较小，所以使用快照事实表，不做增量处理)
 */
public class OdsToDwdFactInventorySnapshot {

    public static void main(String[] args) {
        // Validate the business-date argument up front: the substring() calls below
        // would throw StringIndexOutOfBoundsException on a short argument, and a
        // malformed date would silently produce an empty partition filter.
        if (args.length < 1 || !args[0].matches("\\d{8}")) {
            System.err.println("请传入业务日期（格式：yyyyMMdd），如：20251018");
            System.exit(1);
        }
        String bdpCalcDt = args[0];
        // yyyyMMdd -> yyyy-MM-dd, the format of the ODS partition column `dt`.
        String odsDt = bdpCalcDt.substring(0, 4) + "-" + bdpCalcDt.substring(4, 6) + "-" + bdpCalcDt.substring(6, 8);

        SparkSession spark = SparkSessionUtil.getSparkSession("ODS_TO_DWD_FACT_INVENTORY_SNAPSHOT");

        int exitCode = 0;
        try {
            // BUG FIX: the original concatenated every fragment onto a single line,
            // so the first "--" comment swallowed the remainder of the statement and
            // the SQL could not parse. Each fragment now ends with "\n" so inline
            // comments only cover their own line.
            //
            // Also INSERT INTO -> INSERT OVERWRITE: re-running the job for the same
            // business date now replaces the partition instead of duplicating rows,
            // making the load idempotent. The trailing ";" was removed as well —
            // spark.sql() rejects it as a parse error on several Spark versions.
            //
            // NOTE(review): snapshot_date appears both as a degenerate-dimension
            // column and as the dynamic-partition column. Positional insert ignores
            // the duplicate alias, but Hive forbids a data column sharing a partition
            // column's name — confirm against the dwd_fact_inventory_snapshot DDL.
            String sql =
                    "INSERT OVERWRITE TABLE dwd.dwd_fact_inventory_snapshot PARTITION (snapshot_date)\n" +
                    "SELECT\n" +
                    "  store_code,             -- 门店编码\n" +
                    "  product_code,           -- 商品编码\n" +
                    "  TO_DATE(last_updated) AS snapshot_date, -- 快照日期（退化维度）\n" +
                    "  YEAR(last_updated) AS snapshot_year,    -- 快照年份\n" +
                    "  MONTH(last_updated) AS snapshot_month,  -- 快照月份\n" +
                    "  QUARTER(last_updated) AS snapshot_quarter, -- 快照季度\n" +
                    "  DAYOFWEEK(last_updated) AS day_of_week,  -- 星期几\n" +
                    "  CASE WHEN DAYOFWEEK(last_updated) IN (6,7) THEN 1 ELSE 0 END AS is_weekend, -- 是否周末\n" +
                    "  current_stock,          -- 当前库存\n" +
                    "  safety_stock,           -- 安全库存\n" +
                    "  last_updated,           -- 最后更新时间\n" +
                    "  CURRENT_TIMESTAMP() AS create_time,      -- 创建时间\n" +
                    "  -- 分区字段：快照日期\n" +
                    "  TO_DATE(last_updated) AS snapshot_date\n" +
                    "FROM ods.ods_inventory_info\n" +
                    "WHERE dt = '" + odsDt + "'";

            spark.sql(sql);
            System.out.println("库存快照事实表（dwd_fact_inventory_snapshot）" + bdpCalcDt + "数据抽取完成！");

        } catch (Exception e) {
            e.printStackTrace();
            // Fail the process so the scheduler can detect the error and retry;
            // the original exited 0 even when the load failed.
            exitCode = 1;
        } finally {
            if (spark != null) spark.stop();
        }
        if (exitCode != 0) {
            System.exit(exitCode);
        }
    }
}