package com.bbmall.dws;

import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;

import static org.apache.spark.sql.functions.*;
import org.apache.spark.sql.SaveMode;

import java.time.LocalDate;
import java.time.format.DateTimeFormatter;

public class DwsSalesProcessor {

    private final SparkSession spark;
    private final String processDate;

    /**
     * Batch processor that builds the DWS-layer aggregate tables from the
     * DWD-layer fact/dimension tables for a single partition date.
     *
     * @param spark       active Spark session used for all reads and writes
     * @param processDate partition date to process, ISO format (yyyy-MM-dd);
     *                    used both as a filter value and as the dt partition
     */
    public DwsSalesProcessor(SparkSession spark, String processDate) {
        this.spark = spark;
        this.processDate = processDate;
    }

    /**
     * Runs all five DWD-to-DWS aggregations for {@code processDate}, in order.
     * Any failure is logged and rethrown so the caller/scheduler can react.
     */
    public void processAllTables() {
        try {
            System.out.println("开始处理DWD到DWS层数据，处理日期: " + processDate);

            // 1. Daily sales summary wide table
            processSalesSummaryDaily();

            // 2. Per-category daily sales table
            processCategorySalesDaily();

            // 3. Inventory alert table
            processInventoryAlert();

            // 4. Promotion effect table
            processPromotionEffect();

            // 5. Realtime sales stream table
            processRealtimeSalesStream();

            System.out.println("DWD到DWS层数据处理完成");

        } catch (Exception e) {
            System.err.println("处理过程中发生错误: " + e.getMessage());
            e.printStackTrace();
            // Precise rethrow (Java 7+): the try block can only throw unchecked
            // exceptions, so rethrowing the caught Exception compiles.
            throw e;
        }
    }

    /**
     * Writes a result DataFrame to a DWS table, partitioned by dt.
     *
     * <p>Bug fix: the aggregated/selected DataFrames no longer carry a
     * {@code dt} column (groupBy/select drops it), so {@code partitionBy("dt")}
     * would fail at write time with a missing-column error. The partition
     * column is re-attached here from {@code processDate} before writing.
     *
     * @param result    DataFrame to persist (without a dt column)
     * @param tableName fully qualified target table, e.g. "dws.xxx"
     */
    private void writeToDws(Dataset<Row> result, String tableName) {
        result.withColumn("dt", lit(processDate))
                .write()
                .mode(SaveMode.Overwrite)
                // Dynamic mode overwrites only the partitions present in the
                // data (i.e. dt = processDate), not the whole table.
                .option("partitionOverwriteMode", "dynamic")
                .partitionBy("dt")
                .saveAsTable(tableName);
    }

    /**
     * 1. Daily sales summary wide table: per-day totals for amount, orders,
     * distinct customers, and average spend per customer.
     */
    private void processSalesSummaryDaily() {
        System.out.println("开始处理销售汇总宽表...");

        // Incremental read of the DWD sales fact for the target date only;
        // filtering on dt prunes partitions, sale_date guards late data.
        Dataset<Row> dwdSalesDF = spark.table("dwd.dwd_fact_sales")
                .filter(col("sale_date").equalTo(processDate))
                .filter(col("dt").equalTo(processDate));

        // Core daily KPIs. NOTE(review): avg_customer_spent divides by
        // countDistinct(member_id) and will be null if no non-null member_id
        // exists on the day — confirm downstream tolerates null here.
        Dataset<Row> salesSummary = dwdSalesDF
                .groupBy("sale_date")
                .agg(
                        sum("sales_amount").as("total_sales_amount"),
                        countDistinct("transaction_id").as("total_order_count"),
                        countDistinct("member_id").as("total_customer_count"),
                        expr("sum(sales_amount) / count(DISTINCT member_id)").as("avg_customer_spent"),
                        first("day_of_week").as("day_of_week")
                )
                .withColumn("create_time", current_timestamp());

        // Write to the DWS layer with dynamic partition overwrite.
        writeToDws(salesSummary, "dws.dws_sales_summary_daily");

        System.out.println("销售汇总宽表处理完成，处理记录数: " + salesSummary.count());
    }

    /**
     * 2. Per-category daily sales table: category totals joined against the
     * day's overall total to compute each category's revenue share.
     */
    private void processCategorySalesDaily() {
        System.out.println("开始处理商品分类销售表...");

        // Read DWD-layer fact and product dimension for the target date.
        Dataset<Row> dwdSalesDF = spark.table("dwd.dwd_fact_sales")
                .filter(col("sale_date").equalTo(processDate))
                .filter(col("dt").equalTo(processDate));

        Dataset<Row> dwdProductDF = spark.table("dwd.dwd_dim_product")
                .filter(col("dt").equalTo(processDate))
                .filter(col("is_active").equalTo(1));

        // Join to the product dimension to pick up the category. Inner join:
        // sales rows with no active product record are dropped.
        Dataset<Row> salesWithCategory = dwdSalesDF.alias("sales")
                .join(dwdProductDF.alias("product"),
                        col("sales.product_code").equalTo(col("product.product_code")))
                .select(
                        col("sales.sale_date"),
                        col("product.product_category"),
                        col("sales.sales_amount")
                );

        // Day-level total, used as the denominator for the category ratio.
        Dataset<Row> dailyTotalSales = dwdSalesDF
                .groupBy("sale_date")
                .agg(sum("sales_amount").as("total_daily_sales_amount"));

        // Aggregate per category and compute its share of the daily total.
        Dataset<Row> categorySales = salesWithCategory
                .groupBy("sale_date", "product_category")
                .agg(sum("sales_amount").as("category_sales_amount"))
                .alias("cat")
                .join(dailyTotalSales.alias("total"),
                        col("cat.sale_date").equalTo(col("total.sale_date")))
                .select(
                        col("cat.sale_date"),
                        col("cat.product_category"),
                        col("cat.category_sales_amount"),
                        expr("cat.category_sales_amount / total.total_daily_sales_amount").as("category_sales_ratio")
                )
                .withColumn("create_time", current_timestamp());

        // Write to the DWS layer with dynamic partition overwrite.
        writeToDws(categorySales, "dws.dws_category_sales_daily");

        System.out.println("商品分类销售表处理完成，处理记录数: " + categorySales.count());
    }

    /**
     * 3. Inventory alert table: current vs. safety stock per active product,
     * taken from the daily inventory snapshot.
     */
    private void processInventoryAlert() {
        System.out.println("开始处理库存预警表...");

        // Read the inventory snapshot and product dimension for the date.
        Dataset<Row> dwdInventoryDF = spark.table("dwd.dwd_fact_inventory_snapshot")
                .filter(col("snapshot_date").equalTo(processDate))
                .filter(col("dt").equalTo(processDate));

        Dataset<Row> dwdProductDF = spark.table("dwd.dwd_dim_product")
                .filter(col("dt").equalTo(processDate))
                .filter(col("is_active").equalTo(1));

        // Attach product name; inactive products are excluded by the join.
        Dataset<Row> inventoryAlert = dwdInventoryDF.alias("inv")
                .join(dwdProductDF.alias("product"),
                        col("inv.product_code").equalTo(col("product.product_code")))
                .select(
                        col("inv.snapshot_date").as("stat_date"),
                        col("product.product_code"),
                        col("product.product_name"),
                        col("inv.current_stock"),
                        col("inv.safety_stock")
                )
                .withColumn("create_time", current_timestamp());

        // Write to the DWS layer with dynamic partition overwrite.
        writeToDws(inventoryAlert, "dws.dws_inventory_alert");

        System.out.println("库存预警表处理完成，处理记录数: " + inventoryAlert.count());
    }

    /**
     * 4. Promotion effect table: promoted-day sales per promotion type,
     * compared against a trailing baseline window to derive a growth rate.
     */
    private void processPromotionEffect() {
        System.out.println("开始处理促销效果表...");

        // Promoted sales only: rows explicitly flagged "无促销" are excluded.
        Dataset<Row> promotionSalesDF = spark.table("dwd.dwd_fact_sales")
                .filter(col("sale_date").equalTo(processDate))
                .filter(col("dt").equalTo(processDate))
                .filter(not(col("promotion_type").equalTo("无促销")));

        // isEmpty() short-circuits instead of running a full count job.
        if (!promotionSalesDF.isEmpty()) {
            // Aggregate promoted sales per promotion type.
            Dataset<Row> promotionSummary = promotionSalesDF
                    .groupBy("sale_date", "promotion_type")
                    .agg(sum("sales_amount").as("promotion_sales_amount"));

            // Baseline window: the 7-day span from 14 to 8 days before the
            // process date (i.e. the week BEFORE last week, to avoid overlap
            // with a possibly still-promoted recent week).
            LocalDate currentDate = LocalDate.parse(processDate, DateTimeFormatter.ISO_DATE);
            String baselineStartDate = currentDate.minusDays(14).toString();
            String baselineEndDate = currentDate.minusDays(8).toString();

            Dataset<Row> baselineSalesDF = spark.table("dwd.dwd_fact_sales")
                    .filter(col("sale_date").between(baselineStartDate, baselineEndDate))
                    .filter(col("dt").between(baselineStartDate, baselineEndDate));

            // Per-type average line amount over the baseline window.
            Dataset<Row> baselineSummary = baselineSalesDF
                    .groupBy("promotion_type")
                    .agg(avg("sales_amount").as("baseline_sales_amount"));

            // Join and compute relative growth vs. the baseline. Inner join:
            // promotion types absent from the baseline window are dropped.
            Dataset<Row> promotionEffect = promotionSummary.alias("promo")
                    .join(baselineSummary.alias("base"),
                            col("promo.promotion_type").equalTo(col("base.promotion_type")))
                    .select(
                            col("promo.sale_date"),
                            col("promo.promotion_type"),
                            col("promo.promotion_sales_amount"),
                            col("base.baseline_sales_amount"),
                            expr("(promo.promotion_sales_amount - base.baseline_sales_amount) / base.baseline_sales_amount")
                                    .as("sales_growth_percent")
                    )
                    .withColumn("create_time", current_timestamp());

            // Write to the DWS layer with dynamic partition overwrite.
            writeToDws(promotionEffect, "dws.dws_promotion_effect");

            System.out.println("促销效果表处理完成，处理记录数: " + promotionEffect.count());
        } else {
            System.out.println("当日无促销数据，跳过促销效果表处理");
        }
    }

    /**
     * 5. Realtime sales stream table: the day's transaction-level rows,
     * restricted to active products and open stores.
     */
    private void processRealtimeSalesStream() {
        System.out.println("开始处理实时销售流水表...");

        // All of the day's transaction rows from the DWD sales fact.
        Dataset<Row> dwdSalesDF = spark.table("dwd.dwd_fact_sales")
                .filter(col("sale_date").equalTo(processDate))
                .filter(col("dt").equalTo(processDate));

        // Dimensions used only as join filters (active products, open stores).
        Dataset<Row> dwdProductDF = spark.table("dwd.dwd_dim_product")
                .filter(col("dt").equalTo(processDate))
                .filter(col("is_active").equalTo(1));

        Dataset<Row> dwdStoreDF = spark.table("dwd.dwd_dim_store")
                .filter(col("dt").equalTo(processDate))
                .filter(col("status").equalTo(1));

        // Inner joins drop transactions whose product or store fails the
        // active/open filters above.
        Dataset<Row> realtimeSales = dwdSalesDF.alias("sales")
                .join(dwdProductDF.alias("product"),
                        col("sales.product_code").equalTo(col("product.product_code")))
                .join(dwdStoreDF.alias("store"),
                        col("sales.store_code").equalTo(col("store.store_code")))
                .select(
                        col("sales.transaction_id"),
                        col("sales.sale_datetime"),
                        col("sales.sale_date"),
                        col("sales.store_code"),
                        col("sales.product_code"),
                        col("sales.sales_amount")
                )
                .withColumn("create_time", current_timestamp());

        // Write to the DWS layer with dynamic partition overwrite.
        writeToDws(realtimeSales, "dws.dws_realtime_sales_stream");

        System.out.println("实时销售流水表处理完成，处理记录数: " + realtimeSales.count());
    }
}