package com.bbmall.dws;

import org.apache.spark.sql.*;

import static org.apache.spark.sql.functions.*;

import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Objects;

public class DWSStoreProcessor {

    private final SparkSession spark;
    // Partition date being processed, in yyyy-MM-dd form.
    private final String processDate;

    // Configuration constants.
    private static final String DWD_DATABASE = "dwd";
    private static final String DWS_DATABASE = "dws";
    private static final double LOW_EFFICIENCY_THRESHOLD = 30000.0;
    private static final double MEDIUM_EFFICIENCY_THRESHOLD = 40000.0;

    // DateTimeFormatter is immutable and thread-safe; build it once instead of per call.
    private static final DateTimeFormatter DATE_FORMAT = DateTimeFormatter.ofPattern("yyyy-MM-dd");

    /**
     * @param spark       active Spark session with Hive support enabled
     * @param processDate partition date to process, in yyyy-MM-dd format
     */
    public DWSStoreProcessor(SparkSession spark, String processDate) {
        this.spark = Objects.requireNonNull(spark, "spark");
        this.processDate = Objects.requireNonNull(processDate, "processDate");
    }

    /** Returns the DWS partition value (yyyyMMdd) derived from {@code processDate}. */
    private String dwsPartition() {
        return processDate.replace("-", "");
    }

    /**
     * Appends the {@code dt} partition column (yyyyMMdd) and writes the frame into the
     * given DWS table.
     *
     * NOTE(review): insertInto resolves columns by POSITION, not by name — the select
     * order at each call site must match the target table schema exactly, with dt last.
     * With SaveMode.Overwrite and the default static partitionOverwriteMode this can
     * rewrite more than the current partition; confirm
     * spark.sql.sources.partitionOverwriteMode for these tables.
     */
    private void writeToDws(Dataset<Row> df, String tableName) {
        df.withColumn("dt", lit(dwsPartition()))
                .write()
                .mode(SaveMode.Overwrite)
                .insertInto(DWS_DATABASE + "." + tableName);
    }

    /**
     * Builds the store daily summary wide table (dws_store_daily_summary): per-store,
     * per-day sales aggregates joined with store attributes and an inventory-turnover
     * ratio for the process date.
     */
    public void processStoreDailySummary() {
        System.out.println("开始处理门店日汇总宽表，日期: " + processDate);

        try {
            // DWD sales facts for the day; drop rows unusable for aggregation.
            Dataset<Row> salesDf = spark.table(DWD_DATABASE + ".fact_sales")
                    .where(col("dt").equalTo(processDate))
                    .filter(col("store_id").isNotNull().and(col("sales_amount").isNotNull()));

            // DWD store dimension snapshot for the same day.
            Dataset<Row> storeDf = spark.table(DWD_DATABASE + ".dim_store")
                    .where(col("dt").equalTo(processDate))
                    .select("store_id", "store_name", "city", "store_status", "area_sqm");

            // Average inventory value per store for the day.
            Dataset<Row> inventoryDf = spark.table(DWD_DATABASE + ".fact_inventory")
                    .where(col("dt").equalTo(processDate))
                    .groupBy("store_id")
                    .agg(avg("inventory_value").as("avg_inventory"));

            // Per-store/per-day sales metrics.
            Dataset<Row> dailySummary = salesDf
                    .groupBy("store_id", "date_id")
                    .agg(
                            sum("sales_amount").as("daily_sales"),
                            countDistinct("customer_id").as("daily_customers"),
                            count("order_id").as("order_count"),
                            avg("sales_amount").as("avg_basket_value"),
                            avg("commission_rate").as("commission_rate"),
                            avg("income_rate").as("income_rate"),
                            first("tier_level").as("tier_level")
                    );

            // Left-join store attributes. Explicit Dataset.col(...) references keep the
            // shared "store_id" column unambiguous on Spark 2.4.
            Dataset<Row> joinedWithStore = dailySummary
                    .join(storeDf, dailySummary.col("store_id").equalTo(storeDf.col("store_id")), "left")
                    .select(
                            dailySummary.col("store_id"),
                            dailySummary.col("date_id"),
                            storeDf.col("store_name"),
                            storeDf.col("city"),
                            storeDf.col("store_status"),
                            dailySummary.col("daily_sales"),
                            dailySummary.col("daily_customers"),
                            dailySummary.col("order_count"),
                            dailySummary.col("avg_basket_value"),
                            storeDf.col("area_sqm"),
                            dailySummary.col("commission_rate"),
                            dailySummary.col("income_rate"),
                            dailySummary.col("tier_level")
                    );

            // Left-join inventory and derive the turnover ratio. When the left join
            // produces a null avg_inventory, the when() condition is null (not true),
            // so otherwise(0) applies — no null turnover rows.
            Dataset<Row> dwsResult = joinedWithStore
                    .join(inventoryDf, joinedWithStore.col("store_id").equalTo(inventoryDf.col("store_id")), "left")
                    .select(
                            joinedWithStore.col("store_id"),
                            joinedWithStore.col("date_id"),
                            joinedWithStore.col("store_name"),
                            joinedWithStore.col("city"),
                            joinedWithStore.col("store_status"),
                            joinedWithStore.col("daily_sales"),
                            joinedWithStore.col("daily_customers"),
                            joinedWithStore.col("order_count"),
                            joinedWithStore.col("avg_basket_value"),
                            joinedWithStore.col("area_sqm"),
                            when(inventoryDf.col("avg_inventory").gt(0),
                                    joinedWithStore.col("daily_sales").divide(inventoryDf.col("avg_inventory")))
                                    .otherwise(lit(0)).as("inventory_turnover"),
                            joinedWithStore.col("commission_rate"),
                            joinedWithStore.col("income_rate"),
                            joinedWithStore.col("tier_level")
                    );

            writeToDws(dwsResult, "dws_store_daily_summary");

            System.out.println("门店日汇总宽表处理完成");

        } catch (Exception e) {
            System.err.println("处理门店日汇总宽表时出错: " + e.getMessage());
            throw e;
        }
    }

    /**
     * Builds store performance metrics (dws_store_performance_metrics) over a rolling
     * 30-day window of the daily summary table: 7/30-day average daily sales, sales
     * per square metre, a composite performance score, and an alert level.
     *
     * Depends on dws_store_daily_summary having been populated for the window — see
     * {@link #processAllTables()} for the required ordering.
     */
    public void processStorePerformanceMetrics() {
        System.out.println("开始处理门店绩效指标表，日期: " + processDate);

        try {
            // Window start: 30 days before the process date.
            LocalDate currentDate = LocalDate.parse(processDate);
            String startDate = currentDate.minusDays(30).format(DATE_FORMAT);

            // Last 30 days of the daily summary (DWS dt partitions are yyyyMMdd).
            Dataset<Row> recentSales = spark.table(DWS_DATABASE + ".dws_store_daily_summary")
                    .where(col("dt").geq(startDate.replace("-", "")).and(col("dt").leq(dwsPartition())))
                    .filter(col("daily_sales").isNotNull().and(col("store_status").equalTo("营业中")));

            String sevenDaysAgo = currentDate.minusDays(7).format(DATE_FORMAT);

            // Rows older than 7 days evaluate to null here, and avg() skips nulls, so
            // avg(sevenDaysCondition) yields the 7-day average.
            // NOTE(review): this assumes date_id is a yyyy-MM-dd string comparable to
            // sevenDaysAgo — confirm the actual date_id format in the summary table.
            Column sevenDaysCondition = when(col("date_id").geq(sevenDaysAgo), col("daily_sales"));

            Dataset<Row> performanceMetrics = recentSales
                    .groupBy("store_id")
                    .agg(
                            // Average daily sales over the last 7 days.
                            avg(sevenDaysCondition).as("avg_daily_sales_7d"),
                            // Average daily sales over the full 30-day window.
                            avg(col("daily_sales")).as("avg_daily_sales_30d"),
                            // Sales per square metre, guarding area_sqm == 0.
                            avg(when(col("area_sqm").gt(0), col("daily_sales").divide(col("area_sqm")))
                                    .otherwise(lit(0))).as("sales_per_sqm"),
                            // Composite score. The zero-area guard mirrors sales_per_sqm:
                            // the previous unguarded division produced nulls (dropped
                            // silently by avg) for stores with area_sqm == 0.
                            avg(col("daily_sales")).divide(1000)
                                    .plus(avg(when(col("area_sqm").gt(0),
                                            col("daily_sales").divide(col("area_sqm")))
                                            .otherwise(lit(0))))
                                    .as("performance_score")
                    )
                    .withColumn("alert_level",
                            when(col("avg_daily_sales_7d").lt(LOW_EFFICIENCY_THRESHOLD), "高")
                                    .when(col("avg_daily_sales_7d").lt(MEDIUM_EFFICIENCY_THRESHOLD), "中")
                                    .otherwise("低"))
                    .withColumn("stat_date", lit(processDate));

            writeToDws(performanceMetrics, "dws_store_performance_metrics");

            System.out.println("门店绩效指标表处理完成");

        } catch (Exception e) {
            System.err.println("处理门店绩效指标表时出错: " + e.getMessage());
            throw e;
        }
    }

    /**
     * Builds the daily inventory-turnover analysis table
     * (dws_inventory_turnover_daily): per store/day/category, daily sales divided by
     * average inventory value.
     */
    public void processInventoryTurnover() {
        System.out.println("开始处理库存周转日分析表，日期: " + processDate);

        try {
            // Daily sales per store/day/category from DWD.
            Dataset<Row> salesData = spark.table(DWD_DATABASE + ".fact_sales")
                    .where(col("dt").equalTo(processDate))
                    .groupBy("store_id", "date_id", "product_category")
                    .agg(sum("sales_amount").as("daily_sales"));

            // Average inventory per store/day/category from DWD.
            Dataset<Row> inventoryData = spark.table(DWD_DATABASE + ".fact_inventory")
                    .where(col("dt").equalTo(processDate))
                    .groupBy("store_id", "date_id", "product_category")
                    .agg(avg("inventory_value").as("avg_inventory"));

            // Inner join on the full grouping key, then derive the turnover rate with
            // a divide-by-zero guard (zero or null inventory -> 0).
            Dataset<Row> turnoverData = salesData
                    .join(inventoryData,
                            salesData.col("store_id").equalTo(inventoryData.col("store_id"))
                                    .and(salesData.col("date_id").equalTo(inventoryData.col("date_id")))
                                    .and(salesData.col("product_category").equalTo(inventoryData.col("product_category"))),
                            "inner")
                    .select(
                            salesData.col("store_id"),
                            salesData.col("date_id"),
                            salesData.col("daily_sales"),
                            inventoryData.col("avg_inventory"),
                            when(inventoryData.col("avg_inventory").gt(0),
                                    salesData.col("daily_sales").divide(inventoryData.col("avg_inventory")))
                                    .otherwise(lit(0)).as("turnover_rate"),
                            salesData.col("product_category").as("category_type")
                    );

            writeToDws(turnoverData, "dws_inventory_turnover_daily");

            System.out.println("库存周转日分析表处理完成");

        } catch (Exception e) {
            System.err.println("处理库存周转日分析表时出错: " + e.getMessage());
            throw e;
        }
    }

    /**
     * Runs all DWS table builds for the process date, best-effort: a failed table is
     * logged and the remaining tables are still attempted.
     *
     * Ordering matters: processStorePerformanceMetrics reads
     * dws_store_daily_summary, which processStoreDailySummary writes. The previous
     * HashMap iterated in an undefined order and could run the metrics job against
     * stale or missing data; LinkedHashMap preserves insertion order.
     */
    public void processAllTables() {
        System.out.println("开始处理所有DWS层表，日期: " + processDate);

        Map<String, Runnable> processMethods = new LinkedHashMap<>();
        processMethods.put("门店日汇总宽表", this::processStoreDailySummary);
        processMethods.put("门店绩效指标表", this::processStorePerformanceMetrics);
        processMethods.put("库存周转日分析表", this::processInventoryTurnover);

        for (Map.Entry<String, Runnable> entry : processMethods.entrySet()) {
            String tableName = entry.getKey();

            try {
                System.out.println("开始处理: " + tableName);
                entry.getValue().run();
                System.out.println(tableName + " 处理成功");
            } catch (Exception e) {
                // Deliberately best-effort: log the failure and continue with the
                // remaining tables rather than aborting the whole run.
                System.err.println(tableName + " 处理失败: " + e.getMessage());
                e.printStackTrace();
            }
        }

        System.out.println("所有DWS层表处理完成");
    }

    /**
     * Program entry point. Optional args[0] is the process date (yyyy-MM-dd);
     * defaults to yesterday. Exits with status 1 on failure.
     */
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .appName("DWDToDWSProcessor")
                .enableHiveSupport()
                .config("spark.sql.adaptive.enabled", "false") // not supported on Spark 2.4
                .config("spark.sql.adaptive.coalescePartitions.enabled", "false") // not supported on Spark 2.4
                .getOrCreate();

        spark.sparkContext().setLogLevel("WARN");

        // Process date: first CLI argument, or yesterday by default.
        String processDate = (args.length > 0)
                ? args[0]
                : LocalDate.now().minusDays(1).format(DATE_FORMAT);

        System.out.println("DWD到DWS数据处理开始，处理日期: " + processDate);

        // Track the exit status instead of calling System.exit inside the catch:
        // System.exit would terminate the JVM before finally ran, skipping spark.stop().
        int exitCode = 0;
        try {
            // Fail fast on a malformed date instead of running part of the pipeline
            // against nonexistent partitions.
            LocalDate.parse(processDate, DATE_FORMAT);

            DWSStoreProcessor processor = new DWSStoreProcessor(spark, processDate);
            processor.processAllTables();

            System.out.println("DWD到DWS数据处理完成");

        } catch (Exception e) {
            System.err.println("数据处理过程中出现错误: " + e.getMessage());
            e.printStackTrace();
            exitCode = 1;
        } finally {
            spark.stop();
        }

        if (exitCode != 0) {
            System.exit(exitCode);
        }
    }
}