package com.bbmall.ads;

import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.functions;
import static org.apache.spark.sql.functions.*;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.SparkConf;

import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.List;

/**
 * Batch job that materializes the ADS (application data service) layer from the
 * DWS (daily summary) layer for a single processing date.
 *
 * <p>Each target table is overwritten only for the {@code dt} partition equal to
 * the processing date (dynamic partition overwrite). The job processes five
 * tables (KPI, hot category, inventory alert, promotion effect, recent sales)
 * and finishes with a lightweight data-quality check.
 */
public class DwsToAdsProcessor {

    /** Hive partition column shared by every DWS source and ADS target table. */
    private static final String PARTITION_COLUMN = "dt";

    private final SparkSession spark;
    private final String processDate;

    /**
     * Entry point.
     *
     * @param args args[0] = processing date, used both as a filter value on the
     *             sources and as the {@code dt} partition value of the outputs
     */
    public static void main(String[] args) {
        if (args.length < 1) {
            System.err.println("Usage: DwsToAdsProcessor <process_date>");
            System.exit(1);
        }

        String processDate = args[0];

        // AQE on for adaptive shuffle partitioning; dynamic partition overwrite
        // so only the processed date's partition is replaced on each run.
        SparkConf conf = new SparkConf()
                .setAppName("DWS_to_ADS_Processing")
                .set("spark.sql.adaptive.enabled", "true")
                .set("spark.sql.adaptive.coalescePartitions.enabled", "true")
                .set("spark.sql.sources.partitionOverwriteMode", "dynamic")
                .set("spark.sql.hive.convertMetastoreParquet", "false");

        SparkSession spark = SparkSession.builder()
                .config(conf)
                .enableHiveSupport()
                .getOrCreate();

        // Track the exit code instead of calling System.exit inside the catch:
        // System.exit in the catch block would terminate the JVM before the
        // finally block runs, leaving spark.stop() unexecuted on failure.
        int exitCode = 0;
        try {
            DwsToAdsProcessor processor = new DwsToAdsProcessor(spark, processDate);
            processor.processAllTables();

            System.out.println("ADS层数据处理作业执行成功");

        } catch (Exception e) {
            System.err.println("作业执行失败: " + e.getMessage());
            e.printStackTrace(); // keep the full stack trace, not just the message
            exitCode = 2;
        } finally {
            spark.stop();
        }

        if (exitCode != 0) {
            System.exit(exitCode);
        }
    }

    /**
     * @param spark       active SparkSession with Hive support enabled
     * @param processDate date to process, e.g. "2024-01-31"; must match the
     *                    {@code dt} partition format of the DWS tables
     */
    public DwsToAdsProcessor(SparkSession spark, String processDate) {
        this.spark = spark;
        this.processDate = processDate;
    }

    /**
     * Runs all five DWS-to-ADS table loads in order, then a data-quality check.
     * Rethrows any failure so the caller can set a non-zero exit code.
     */
    public void processAllTables() {
        try {
            System.out.println("开始处理DWS到ADS层数据，处理日期: " + processDate);

            // 1. Today's core KPI table
            processTodayKpi();

            // 2. Hot product category table
            processHotCategory();

            // 3. Inventory alert table
            processInventoryAlert();

            // 4. Promotion effect table
            processPromotionEffect();

            // 5. Recent sales stream table
            processRecentSales();

            // 6. Data-quality check over the freshly written partitions
            checkDataQuality();

            System.out.println("DWS到ADS层数据处理完成");

        } catch (Exception e) {
            System.err.println("处理过程中发生错误: " + e.getMessage());
            e.printStackTrace();
            throw e;
        }
    }

    /**
     * Reads one DWS table restricted to the processing date, filtering both the
     * business-date column and the dt partition column (the latter enables
     * Hive partition pruning).
     *
     * @param table      fully qualified source table name
     * @param dateColumn name of the business-date column in that table
     */
    private Dataset<Row> readDwsTable(String table, String dateColumn) {
        return spark.table(table)
                .filter(col(dateColumn).equalTo(processDate))
                .filter(col(PARTITION_COLUMN).equalTo(processDate));
    }

    /**
     * Writes a projection into an ADS table, partitioned by dt.
     *
     * <p>The upstream {@code select(...)} projections drop the dt column, so it
     * is re-attached here from {@code processDate}; without it,
     * {@code partitionBy("dt")} fails with an AnalysisException because the
     * partition column is not present in the schema.
     *
     * @return the number of rows written (counted once, so callers do not
     *         recompute the lineage a second time after the write)
     */
    private long writeToAds(Dataset<Row> data, String targetTable) {
        Dataset<Row> partitioned = data.withColumn(PARTITION_COLUMN, lit(processDate));
        long rowCount = partitioned.count();

        partitioned.write()
                .mode(SaveMode.Overwrite)
                .option("partitionOverwriteMode", "dynamic")
                .partitionBy(PARTITION_COLUMN)
                .saveAsTable(targetTable);

        return rowCount;
    }

    /**
     * 1. Today's core KPI table: straight column mapping from the daily sales
     * summary.
     */
    private void processTodayKpi() {
        System.out.println("开始处理今日核心指标表...");

        Dataset<Row> todayKpi = readDwsTable("dws.dws_sales_summary_daily", "sale_date")
                .select(
                        col("sale_date").as("stat_date"),
                        col("total_sales_amount").as("today_sales_amount"),
                        col("total_order_count").as("today_order_count"),
                        col("total_customer_count").as("today_customer_count"),
                        col("avg_customer_spent").as("avg_customer_spent")
                );

        long rowCount = writeToAds(todayKpi, "ads.ads_today_kpi");
        System.out.println("今日核心指标表处理完成，处理记录数: " + rowCount);
    }

    /**
     * 2. Hot product category table: straight column mapping from the daily
     * category sales summary.
     */
    private void processHotCategory() {
        System.out.println("开始处理热销商品分类表...");

        Dataset<Row> hotCategory = readDwsTable("dws.dws_category_sales_daily", "sale_date")
                .select(
                        col("sale_date").as("stat_date"),
                        col("product_category"),
                        col("category_sales_ratio")
                );

        long rowCount = writeToAds(hotCategory, "ads.ads_hot_category");
        System.out.println("热销商品分类表处理完成，处理记录数: " + rowCount);
    }

    /**
     * 3. Inventory alert table: keeps only products whose current stock has
     * fallen below their safety stock.
     */
    private void processInventoryAlert() {
        System.out.println("开始处理库存预警表...");

        Dataset<Row> inventoryAlert = readDwsTable("dws.dws_inventory_alert", "stat_date")
                // alert condition: current stock strictly below safety stock
                .filter(col("current_stock").lt(col("safety_stock")))
                .select(
                        col("stat_date"),
                        col("product_name"),
                        col("current_stock")
                );

        long rowCount = writeToAds(inventoryAlert, "ads.ads_inventory_alert");
        System.out.println("库存预警表处理完成，预警商品数: " + rowCount);
    }

    /**
     * 4. Promotion effect table: straight column mapping; skipped entirely when
     * the source has no rows for the date (promotions do not run every day).
     */
    private void processPromotionEffect() {
        System.out.println("开始处理促销效果表...");

        Dataset<Row> dwsPromotionEffect = readDwsTable("dws.dws_promotion_effect", "sale_date");

        // isEmpty() short-circuits on the first row instead of a full count
        if (dwsPromotionEffect.isEmpty()) {
            System.out.println("当日无促销数据，跳过促销效果表处理");
            return;
        }

        Dataset<Row> promotionEffect = dwsPromotionEffect
                .select(
                        col("sale_date").as("stat_date"),
                        col("promotion_type"),
                        col("sales_growth_percent")
                );

        long rowCount = writeToAds(promotionEffect, "ads.ads_promotion_effect");
        System.out.println("促销效果表处理完成，处理记录数: " + rowCount);
    }

    /**
     * 5. Recent sales table: the 100 most recent transactions within the last
     * two hours, relative to the latest sale timestamp seen for the date.
     */
    private void processRecentSales() {
        System.out.println("开始处理实时销售流水表...");

        Dataset<Row> dwsRealtimeSales = readDwsTable("dws.dws_realtime_sales_stream", "sale_date");

        if (dwsRealtimeSales.isEmpty()) {
            System.out.println("当日无销售流水数据，跳过处理");
            return;
        }

        // Latest sale timestamp acts as the reference "now" for the 2h window.
        List<Row> maxTimeRows = dwsRealtimeSales
                .agg(max("sale_datetime").as("max_time"))
                .collectAsList();

        if (maxTimeRows.isEmpty() || maxTimeRows.get(0).isNullAt(0)) {
            System.out.println("当日无有效的销售时间数据，跳过处理");
            return;
        }

        // NOTE(review): getString assumes sale_datetime is a STRING column in
        // Hive; if it is TIMESTAMP this throws ClassCastException — confirm
        // the DWS schema. The value is interpolated into a SQL expression, but
        // it originates from our own table, not user input.
        String maxTime = maxTimeRows.get(0).getString(0);

        // Keep the last 2 hours relative to maxTime, newest first, capped at 100.
        Dataset<Row> recentSales = dwsRealtimeSales
                .filter(col("sale_datetime").geq(
                        expr("from_unixtime(unix_timestamp('" + maxTime + "') - 2*3600)")
                ))
                .select(
                        col("sale_datetime").as("sale_time"),
                        col("transaction_id"),
                        col("sales_amount")
                )
                .orderBy(desc("sale_time"))
                .limit(100);

        long rowCount = writeToAds(recentSales, "ads.ads_recent_sales");
        System.out.println("实时销售流水表处理完成，处理记录数: " + rowCount);
    }

    /**
     * Post-load data-quality check: prints the row count of each ADS table for
     * the processed partition and warns when the core KPI table is empty.
     * Failures here are logged but never abort the job.
     */
    private void checkDataQuality() {
        System.out.println("开始数据质量检查...");

        String[] tables = {
                "ads.ads_today_kpi", "ads.ads_hot_category",
                "ads.ads_inventory_alert", "ads.ads_promotion_effect",
                "ads.ads_recent_sales"
        };

        for (String table : tables) {
            try {
                long count = spark.table(table)
                        .filter(col(PARTITION_COLUMN).equalTo(processDate))
                        .count();
                System.out.println("表 " + table + " 记录数: " + count);
            } catch (Exception e) {
                // Best-effort check: a missing/broken table should not fail the job.
                System.err.println("检查表 " + table + " 时出错: " + e.getMessage());
            }
        }

        // Completeness check on the core KPI partition.
        Dataset<Row> todayKpi = spark.table("ads.ads_today_kpi")
                .filter(col(PARTITION_COLUMN).equalTo(processDate));

        if (todayKpi.isEmpty()) {
            System.err.println("警告: 今日核心指标表无数据");
        } else {
            System.out.println("今日核心指标数据:");
            todayKpi.show();
        }

        System.out.println("数据质量检查完成");
    }

    /**
     * Optional: weekly sales-trend generation (currently computed but not
     * persisted). Kept for front-ends that need a fixed 7-day trend format.
     */
    private void processWeeklyTrend() {
        System.out.println("开始生成周销售趋势数据...");

        // Window = the processing date and the 6 days before it.
        LocalDate currentDate = LocalDate.parse(processDate, DateTimeFormatter.ISO_DATE);
        String startDate = currentDate.minusDays(6).toString();

        Dataset<Row> weeklySales = spark.table("dws.dws_sales_summary_daily")
                .filter(col("sale_date").between(startDate, processDate))
                .filter(col(PARTITION_COLUMN).between(startDate, processDate))
                .select(
                        col("sale_date").as("stat_date"),
                        col("day_of_week"),
                        col("total_sales_amount").as("sales_amount")
                );

        System.out.println("周销售趋势数据生成完成，记录数: " + weeklySales.count());
    }
}