package com.bbmall.dws;

import java.time.LocalDate;
import java.time.YearMonth;

import org.apache.spark.sql.Column;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.expressions.Window;
import org.apache.spark.sql.expressions.WindowSpec;
import org.apache.spark.sql.functions;

import static org.apache.spark.sql.functions.*;


/**
 * Builds the DWS (data warehouse summary) member tables from the DWD layer.
 *
 * <p>Reads Hive tables {@code dwd.dwd_dim_member}, {@code dwd.dwd_fact_sales},
 * {@code dwd.dwd_dim_product} and {@code dwd.dwd_dim_store}, aggregates member
 * statistics, and overwrites the corresponding {@code dws.*} tables.
 * All {@code insertInto} writes are positional, so the column order built here
 * must match each target table's schema.
 */
public class DWSMemberProcessor {

    /** Shared Spark session; all table reads/writes go through it. */
    private final SparkSession spark;

    public DWSMemberProcessor(SparkSession spark) {
        this.spark = spark;
    }

    /**
     * Returns the value at {@code index} of a single-row aggregate result as a
     * double, mapping SQL NULL (what {@code sum()} yields on empty input) to 0.0.
     * Goes through {@link Number} so DECIMAL columns (returned as BigDecimal,
     * for which {@code Row.getDouble} throws ClassCastException) also work.
     */
    private static double nullSafeDouble(Row row, int index) {
        return row.isNullAt(index) ? 0.0 : ((Number) row.get(index)).doubleValue();
    }

    /**
     * numerator/denominator as a percentage rounded to 2 decimal places;
     * returns 0.0 instead of NaN/Infinity when the denominator is 0.
     */
    private static double pct(double numerator, double denominator) {
        return denominator == 0.0 ? 0.0 : Math.round(numerator * 10000.0 / denominator) / 100.0;
    }

    /** Rounds a value to 2 decimal places. */
    private static double round2(double value) {
        return Math.round(value * 100) / 100.0;
    }

    /**
     * Builds the member basic-statistics wide table
     * ({@code dws.dws_member_basic_stats}) for partition date {@code dt}.
     *
     * @param dt processing date in {@code yyyy-MM-dd} format
     */
    public void processMemberBasicStats(String dt) {
        // DWD member dimension snapshot for the processing date.
        Dataset<Row> memberDim = spark.table("dwd.dwd_dim_member")
                .filter(col("dt").equalTo(dt));

        // All sales up to and including the processing date.
        Dataset<Row> salesFact = spark.table("dwd.dwd_fact_sales")
                .filter(col("sale_date").leq(dt));

        // Active members: at least one purchase in the 30 days before dt.
        // date_sub(lit(dt), 30) replaces string-spliced SQL (safer, typed).
        Dataset<Row> activeMembers = salesFact
                .filter(col("sale_date").geq(date_sub(lit(dt), 30)))
                .select("member_id")
                .distinct();

        // Repurchase members: more than one distinct transaction overall.
        Dataset<Row> repurchaseMembers = salesFact
                .filter(col("member_id").isNotNull())
                .groupBy("member_id")
                .agg(countDistinct("transaction_id").as("order_count"))
                .filter(col("order_count").gt(1))
                .select("member_id");

        long totalMembers = memberDim.count();
        long newMembers = memberDim.filter(col("register_date").equalTo(dt)).count();
        long activeMembersCount = activeMembers.count();
        // sum() yields SQL NULL on an empty dimension; map that to 0.0.
        double totalConsumption = nullSafeDouble(memberDim.agg(sum("total_consumption")).first(), 0);
        long repurchaseMembersCount = repurchaseMembers.count();

        // Single-row result: spark.range(1) seeds one row whose "id" is dropped.
        // Rates guard against totalMembers == 0 (would otherwise write NaN).
        Dataset<Row> basicStats = spark.range(1)
                .withColumn("stat_date", lit(dt))
                .withColumn("total_members", lit(totalMembers))
                .withColumn("new_members", lit(newMembers))
                .withColumn("active_members", lit(activeMembersCount))
                .withColumn("activity_rate", lit(pct(activeMembersCount, totalMembers)))
                .withColumn("total_consumption", lit(totalConsumption))
                .withColumn("avg_consumption",
                        lit(totalMembers == 0 ? 0.0 : round2(totalConsumption / totalMembers)))
                .withColumn("repurchase_rate", lit(pct(repurchaseMembersCount, totalMembers)))
                .withColumn("create_time", current_timestamp())
                .drop("id");

        basicStats.write()
                .mode("overwrite")
                .insertInto("dws.dws_member_basic_stats");
    }

    /**
     * Builds the member-level distribution table
     * ({@code dws.dws_member_level_dist}): member count and consumption
     * totals/averages per level for partition date {@code dt}.
     *
     * @param dt processing date in {@code yyyy-MM-dd} format
     */
    public void processMemberLevelDist(String dt) {
        Dataset<Row> memberDim = spark.table("dwd.dwd_dim_member")
                .filter(col("dt").equalTo(dt));

        Dataset<Row> levelDist = memberDim
                .groupBy("member_level")
                .agg(
                        count("*").as("member_count"),
                        sum("total_consumption").as("total_consumption"),
                        avg("total_consumption").as("avg_consumption")
                )
                .withColumn("stat_date", lit(dt))
                .withColumn("create_time", current_timestamp())
                // Reorder columns to the target table's schema (positional insert).
                .select("stat_date", "member_level", "member_count",
                        "total_consumption", "avg_consumption", "create_time");

        levelDist.write()
                .mode("overwrite")
                .insertInto("dws.dws_member_level_dist");
    }

    /**
     * Builds the member monthly-trend table
     * ({@code dws.dws_member_monthly_trend}) for the given year and month.
     *
     * @param year  statistics year, e.g. 2025
     * @param month statistics month, 1-12
     */
    public void processMemberMonthlyTrend(int year, int month) {
        String yearMonth = String.format("%d-%02d", year, month);

        Dataset<Row> salesFact = spark.table("dwd.dwd_fact_sales")
                .filter(col("sale_year").equalTo(year).and(col("sale_month").equalTo(month)));

        Dataset<Row> memberDim = spark.table("dwd.dwd_dim_member");

        // Monthly metrics in one aggregate pass.
        Row salesStats = salesFact.agg(
                sum("sales_amount").as("consumption_amount"),
                countDistinct("transaction_id").as("consumption_orders"),
                countDistinct("member_id").as("active_members")
        ).first();

        // sum() is SQL NULL for a month with no sales; the counts are 0, never NULL.
        double consumptionAmount = nullSafeDouble(salesStats, 0);
        long consumptionOrders = salesStats.getLong(1);
        long activeMembers = salesStats.getLong(2);

        // Members who registered in this year/month.
        long newMembers = memberDim.filter(
                year(col("register_date")).equalTo(year)
                        .and(month(col("register_date")).equalTo(month))
        ).count();

        // Real last day of the month. The previous yearMonth + "-31" produced
        // invalid dates (e.g. 2025-02-31) that DATE-typed comparisons evaluate
        // to NULL, silently dropping rows.
        String endOfMonth = YearMonth.of(year, month).atEndOfMonth().toString();
        long totalMembers = memberDim.filter(col("register_date").leq(endOfMonth)).count();

        Dataset<Row> monthlyTrend = spark.range(1)
                .withColumn("stat_year", lit(year))
                .withColumn("stat_month", lit(month))
                .withColumn("stat_year_month", lit(yearMonth))
                .withColumn("consumption_amount", lit(consumptionAmount))
                .withColumn("consumption_orders", lit(consumptionOrders))
                .withColumn("total_members", lit(totalMembers))
                .withColumn("new_members", lit(newMembers))
                .withColumn("active_members", lit(activeMembers))
                // Placeholder value — TODO: compute the real conversion rate.
                .withColumn("new_member_conversion_rate", lit(65.5))
                .withColumn("create_time", current_timestamp())
                .drop("id");

        monthlyTrend.write()
                .mode("overwrite")
                .insertInto("dws.dws_member_monthly_trend");
    }

    /**
     * Builds the top-5 high-value members table ({@code dws.dws_member_top5}),
     * ranked by total consumption, for partition date {@code dt}.
     *
     * @param dt processing date in {@code yyyy-MM-dd} format
     */
    public void processMemberTop5(String dt) {
        Dataset<Row> memberDim = spark.table("dwd.dwd_dim_member")
                .filter(col("dt").equalTo(dt))
                .filter(col("total_consumption").gt(0));

        // Un-partitioned window: all rows shuffle to one partition. Acceptable
        // for a member dimension of this size; revisit if the table grows large.
        WindowSpec windowSpec = Window.orderBy(col("total_consumption").desc());

        Dataset<Row> top5Members = memberDim
                .withColumn("rank_num", row_number().over(windowSpec))
                .filter(col("rank_num").leq(5))
                .withColumn("stat_date", lit(dt))
                .withColumn("create_time", current_timestamp())
                .select("stat_date", "rank_num", "member_id", "member_name",
                        "member_level", "total_consumption", "create_time");

        top5Members.write()
                .mode("overwrite")
                .insertInto("dws.dws_member_top5");
    }

    /**
     * Builds the member preference table ({@code dws.dws_member_preference}):
     * category and store preferences with each segment's share of total
     * consumption, for partition date {@code dt}.
     *
     * @param dt processing date in {@code yyyy-MM-dd} format
     */
    public void processMemberPreference(String dt) {
        Dataset<Row> salesFact = spark.table("dwd.dwd_fact_sales")
                .filter(col("sale_date").leq(dt));

        Dataset<Row> productDim = spark.table("dwd.dwd_dim_product")
                .filter(col("dt").equalTo(dt));

        Dataset<Row> storeDim = spark.table("dwd.dwd_dim_store")
                .filter(col("dt").equalTo(dt));

        // Category preference.
        Dataset<Row> categoryPreference = salesFact
                .join(productDim, "product_code")
                .filter(col("product_category").isNotNull())
                .groupBy("product_category")
                .agg(
                        countDistinct("member_id").as("member_count"),
                        sum("sales_amount").as("consumption_amount"),
                        countDistinct("transaction_id").as("order_count")
                )
                .withColumn("preference_type", lit("品类"))
                .withColumn("preference_value", col("product_category"));

        // Store preference.
        Dataset<Row> storePreference = salesFact
                .join(storeDim, "store_code")
                .filter(col("store_name").isNotNull())
                .groupBy("store_name")
                .agg(
                        countDistinct("member_id").as("member_count"),
                        sum("sales_amount").as("consumption_amount"),
                        countDistinct("transaction_id").as("order_count")
                )
                .withColumn("preference_type", lit("门店"))
                .withColumn("preference_value", col("store_name"));

        // Overall consumption (denominator of the preference rate).
        // sum() is SQL NULL when there are no sales at all.
        double totalConsumption = nullSafeDouble(salesFact.agg(sum("sales_amount")).first(), 0);

        // Guard against divide-by-zero when there are no sales.
        Column preferenceRate = totalConsumption == 0.0
                ? lit(0.0)
                : round(col("consumption_amount").divide(totalConsumption).multiply(100), 2);

        // Positional union: both inputs share the same column layout.
        Dataset<Row> allPreferences = categoryPreference
                .union(storePreference)
                .withColumn("preference_rate", preferenceRate)
                .withColumn("stat_date", lit(dt))
                .withColumn("create_time", current_timestamp())
                .select("stat_date", "preference_type", "preference_value", "member_count",
                        "consumption_amount", "order_count", "preference_rate", "create_time");

        allPreferences.write()
                .mode("overwrite")
                .insertInto("dws.dws_member_preference");
    }

    /**
     * Orchestrates all DWS table builds for one processing date.
     *
     * @param processDate processing date in {@code yyyy-MM-dd} format
     * @throws java.time.format.DateTimeParseException if {@code processDate}
     *         is not a valid ISO date
     */
    public void processAllDWSTables(String processDate) {
        System.out.println("开始处理DWS层数据，日期: " + processDate);

        // Parse instead of raw substring math: validates the input and fails fast.
        LocalDate date = LocalDate.parse(processDate);
        int year = date.getYear();
        int month = date.getMonthValue();

        processMemberBasicStats(processDate);
        System.out.println("✓ 会员基础统计宽表处理完成");

        processMemberLevelDist(processDate);
        System.out.println("✓ 会员等级分布表处理完成");

        processMemberMonthlyTrend(year, month);
        System.out.println("✓ 会员月度趋势表处理完成");

        processMemberTop5(processDate);
        System.out.println("✓ 高价值会员TOP5表处理完成");

        processMemberPreference(processDate);
        System.out.println("✓ 会员偏好分析表处理完成");

        System.out.println("DWS层所有表处理完成!");
    }

    public static void main(String[] args) {
        // Build the Spark session with Hive support and adaptive execution.
        SparkSession spark = SparkSession.builder()
                .appName("DWS Member Processing")
                .enableHiveSupport()
                .config("spark.sql.adaptive.enabled", "true")
                .config("spark.sql.adaptive.coalescePartitions.enabled", "true")
                .getOrCreate();

        // Reduce log noise.
        spark.sparkContext().setLogLevel("WARN");

        // Processing date: first CLI argument, or a default for local runs.
        String processDate = args.length > 0 ? args[0] : "2025-10-13";

        DWSMemberProcessor processor = new DWSMemberProcessor(spark);
        processor.processAllDWSTables(processDate);

        spark.stop();
    }
}