package com.bbmall.ads;

import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.functions;
import static org.apache.spark.sql.functions.*;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.SparkConf;

import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;

/**
 * Batch job that populates the ADS (application data store) layer from the
 * DWS (data warehouse summary) layer for a single processing date.
 *
 * <p>Five ADS tables are produced (today's KPIs, hot categories, inventory
 * alerts, promotion effect, recent sales), each written with dynamic
 * partition overwrite on the {@code dt} partition column, followed by a
 * row-count data-quality check.
 *
 * <p>Usage: {@code ADSStoreProcessor <process_date> [incremental|full]}
 */
public class ADSStoreProcessor {

    private final SparkSession spark;
    // Mutable on purpose: processDateRange() advances this field day by day.
    private String processDate;
    private final boolean isIncremental;

    /**
     * Entry point. Parses CLI arguments, builds the SparkSession and runs
     * the full DWS→ADS pipeline for one date. Exits 1 on bad arguments,
     * 2 on a processing failure.
     */
    public static void main(String[] args) {
        if (args.length < 1) {
            // NOTE: usage text previously referenced a stale class name
            // ("DwsToAdsProcessor"); it now matches this class.
            System.err.println("Usage: ADSStoreProcessor <process_date> [incremental|full]");
            System.err.println("Example: ADSStoreProcessor 2024-01-15 incremental");
            System.err.println("Example: ADSStoreProcessor 2024-01-15 full");
            System.exit(1);
        }

        String processDate = args[0];
        boolean isIncremental = args.length > 1 && "incremental".equals(args[1]);

        // Spark configuration. Dynamic partition-overwrite mode is required so
        // that SaveMode.Overwrite only replaces the partitions being written,
        // not the whole table.
        SparkConf conf = new SparkConf()
                .setAppName("DWS_to_ADS_Processing_" + (isIncremental ? "INCREMENTAL" : "FULL"))
                .set("spark.sql.adaptive.enabled", "true")
                .set("spark.sql.adaptive.coalescePartitions.enabled", "true")
                .set("spark.sql.sources.partitionOverwriteMode", "dynamic")
                .set("spark.sql.hive.convertMetastoreParquet", "false")
                .set("spark.sql.sources.partitionColumnTypeInference.enabled", "false");

        SparkSession spark = SparkSession.builder()
                .config(conf)
                .enableHiveSupport()
                .getOrCreate();

        try {
            ADSStoreProcessor processor = new ADSStoreProcessor(spark, processDate, isIncremental);
            processor.processAllTables();

            System.out.println("ADS层数据处理作业执行成功");

        } catch (Exception e) {
            System.err.println("作业执行失败: " + e.getMessage());
            e.printStackTrace();
            System.exit(2);
        } finally {
            // Always release the Spark session, even on failure.
            if (spark != null) {
                spark.stop();
            }
        }
    }

    /**
     * @param spark         an active SparkSession with Hive support enabled
     * @param processDate   the target partition date, formatted yyyy-MM-dd
     *                      (assumed to match the dt partition values — not
     *                      validated here; TODO confirm upstream guarantees)
     * @param isIncremental true for incremental mode (recent-sales table only
     *                      keeps the last 2 hours), false for full/historical
     */
    public ADSStoreProcessor(SparkSession spark, String processDate, boolean isIncremental) {
        this.spark = spark;
        this.processDate = processDate;
        this.isIncremental = isIncremental;
    }

    /**
     * Runs all five ADS table builds for {@code processDate}, then the
     * data-quality check. Rethrows any failure after logging it so the
     * caller can decide the exit code.
     */
    public void processAllTables() {
        try {
            System.out.println("开始处理DWS到ADS层数据");
            System.out.println("处理日期: " + processDate);
            System.out.println("处理模式: " + (isIncremental ? "增量" : "全量"));

            processTodayKpi();        // 1. today's core KPIs
            processHotCategory();     // 2. hot product categories
            processInventoryAlert();  // 3. low-stock alerts
            processPromotionEffect(); // 4. promotion effectiveness
            processRecentSales();     // 5. recent sales stream sample

            checkDataQuality();

            System.out.println("DWS到ADS层数据处理完成");

        } catch (Exception e) {
            System.err.println("处理过程中发生错误: " + e.getMessage());
            e.printStackTrace();
            throw e;
        }
    }

    /**
     * Writes a dataset to the given ADS table, dynamically overwriting only
     * the "dt" partitions present in the data. The dataset MUST contain a
     * "dt" column — Spark rejects partitionBy on a missing column.
     */
    private void writeToAds(Dataset<Row> data, String tableName) {
        data.write()
                .mode(SaveMode.Overwrite)
                .option("partitionOverwriteMode", "dynamic")
                .partitionBy("dt")
                .saveAsTable(tableName);
    }

    /**
     * 1. Today's core KPI table: a straight projection of the daily sales
     * summary for the processing date.
     */
    private void processTodayKpi() {
        System.out.println("开始处理今日核心指标表...");

        try {
            Dataset<Row> dwsSalesSummary = spark.table("dws.dws_sales_summary_daily")
                    .filter(col("sale_date").equalTo(processDate))
                    .filter(col("dt").equalTo(processDate));

            if (dwsSalesSummary.count() == 0) {
                System.out.println("警告: DWS层在 " + processDate + " 无销售汇总数据");
                return;
            }

            // BUG FIX: the projection must carry the "dt" partition column
            // through — the original select dropped it, making the
            // partitionBy("dt") write fail with "partition column not found".
            Dataset<Row> todayKpi = dwsSalesSummary
                    .select(
                            col("sale_date").as("stat_date"),
                            col("total_sales_amount").as("today_sales_amount"),
                            col("total_order_count").as("today_order_count"),
                            col("total_customer_count").as("today_customer_count"),
                            col("avg_customer_spent").as("avg_customer_spent"),
                            col("dt")
                    );

            // Count once before the write instead of re-triggering the plan after.
            long written = todayKpi.count();
            writeToAds(todayKpi, "ads.ads_today_kpi");

            System.out.println("今日核心指标表处理完成，处理记录数: " + written);

        } catch (Exception e) {
            System.err.println("处理今日核心指标表时出错: " + e.getMessage());
            throw e;
        }
    }

    /**
     * 2. Hot-category table: per-category share of daily sales.
     */
    private void processHotCategory() {
        System.out.println("开始处理热销商品分类表...");

        try {
            Dataset<Row> dwsCategorySales = spark.table("dws.dws_category_sales_daily")
                    .filter(col("sale_date").equalTo(processDate))
                    .filter(col("dt").equalTo(processDate));

            if (dwsCategorySales.count() == 0) {
                System.out.println("警告: DWS层在 " + processDate + " 无分类销售数据");
                return;
            }

            // BUG FIX: keep "dt" in the projection for the partitioned write.
            Dataset<Row> hotCategory = dwsCategorySales
                    .select(
                            col("sale_date").as("stat_date"),
                            col("product_category"),
                            col("category_sales_ratio"),
                            col("dt")
                    );

            long written = hotCategory.count();
            writeToAds(hotCategory, "ads.ads_hot_category");

            System.out.println("热销商品分类表处理完成，处理记录数: " + written);

        } catch (Exception e) {
            System.err.println("处理热销商品分类表时出错: " + e.getMessage());
            throw e;
        }
    }

    /**
     * 3. Inventory-alert table: products whose current stock has fallen
     * below the safety-stock threshold.
     */
    private void processInventoryAlert() {
        System.out.println("开始处理库存预警表...");

        try {
            Dataset<Row> dwsInventoryAlert = spark.table("dws.dws_inventory_alert")
                    .filter(col("stat_date").equalTo(processDate))
                    .filter(col("dt").equalTo(processDate));

            if (dwsInventoryAlert.count() == 0) {
                System.out.println("警告: DWS层在 " + processDate + " 无库存数据");
                return;
            }

            // Alert condition: current stock strictly below safety stock.
            // BUG FIX: keep "dt" in the projection for the partitioned write.
            Dataset<Row> inventoryAlert = dwsInventoryAlert
                    .filter(col("current_stock").lt(col("safety_stock")))
                    .select(
                            col("stat_date"),
                            col("product_name"),
                            col("current_stock"),
                            col("dt")
                    );

            long written = inventoryAlert.count();
            writeToAds(inventoryAlert, "ads.ads_inventory_alert");

            System.out.println("库存预警表处理完成，预警商品数: " + written);

        } catch (Exception e) {
            System.err.println("处理库存预警表时出错: " + e.getMessage());
            throw e;
        }
    }

    /**
     * 4. Promotion-effect table: sales growth by promotion type. Skipped
     * silently when the day had no promotions.
     */
    private void processPromotionEffect() {
        System.out.println("开始处理促销效果表...");

        try {
            Dataset<Row> dwsPromotionEffect = spark.table("dws.dws_promotion_effect")
                    .filter(col("sale_date").equalTo(processDate))
                    .filter(col("dt").equalTo(processDate));

            if (dwsPromotionEffect.count() == 0) {
                System.out.println("当日无促销数据，跳过促销效果表处理");
                return;
            }

            // BUG FIX: keep "dt" in the projection for the partitioned write.
            Dataset<Row> promotionEffect = dwsPromotionEffect
                    .select(
                            col("sale_date").as("stat_date"),
                            col("promotion_type"),
                            col("sales_growth_percent"),
                            col("dt")
                    );

            long written = promotionEffect.count();
            writeToAds(promotionEffect, "ads.ads_promotion_effect");

            System.out.println("促销效果表处理完成，处理记录数: " + written);

        } catch (Exception e) {
            System.err.println("处理促销效果表时出错: " + e.getMessage());
            throw e;
        }
    }

    /**
     * 5. Recent-sales table: a 100-row sample of the latest transactions.
     * Incremental mode restricts the sample to the 2 hours preceding the
     * day's last sale; full mode takes the day's last 100 rows.
     */
    private void processRecentSales() {
        System.out.println("开始处理实时销售流水表...");

        try {
            Dataset<Row> dwsRealtimeSales = spark.table("dws.dws_realtime_sales_stream")
                    .filter(col("sale_date").equalTo(processDate))
                    .filter(col("dt").equalTo(processDate));

            if (dwsRealtimeSales.count() == 0) {
                System.out.println("当日无销售流水数据，跳过处理");
                return;
            }

            Dataset<Row> recentSales;

            if (isIncremental) {
                // Incremental mode: window = [max(sale_datetime) - 2h, max].
                List<Row> maxTimeRows = dwsRealtimeSales
                        .agg(max("sale_datetime").as("max_time"))
                        .collectAsList();

                if (maxTimeRows.isEmpty() || maxTimeRows.get(0).isNullAt(0)) {
                    System.out.println("当日无有效的销售时间数据，跳过处理");
                    return;
                }

                // get(0).toString() instead of getString(0): tolerant of
                // sale_datetime being a TIMESTAMP rather than STRING column
                // (getString would throw ClassCastException on a Timestamp).
                String maxTime = maxTimeRows.get(0).get(0).toString();

                // BUG FIX: keep "dt" in the projection for the partitioned write.
                recentSales = dwsRealtimeSales
                        .filter(col("sale_datetime").geq(
                                expr("from_unixtime(unix_timestamp('" + maxTime + "') - 2*3600)")
                        ))
                        .select(
                                col("sale_datetime").as("sale_time"),
                                col("transaction_id"),
                                col("sales_amount"),
                                col("dt")
                        )
                        .orderBy(desc("sale_time"))
                        .limit(100);
            } else {
                // Full/historical mode: the day's last 100 transactions.
                recentSales = dwsRealtimeSales
                        .select(
                                col("sale_datetime").as("sale_time"),
                                col("transaction_id"),
                                col("sales_amount"),
                                col("dt")
                        )
                        .orderBy(desc("sale_time"))
                        .limit(100);
            }

            long written = recentSales.count();
            writeToAds(recentSales, "ads.ads_recent_sales");

            System.out.println("实时销售流水表处理完成，处理记录数: " + written);

        } catch (Exception e) {
            System.err.println("处理实时销售流水表时出错: " + e.getMessage());
            throw e;
        }
    }

    /**
     * Post-load data-quality check: logs the row count of every ADS table
     * for the processing date (warning on zero) and prints a small sample
     * of the KPI table. Per-table failures are logged, not rethrown, so one
     * missing table does not abort the whole check.
     */
    private void checkDataQuality() {
        System.out.println("\n开始数据质量检查...");

        String[] tables = {
                "ads.ads_today_kpi", "ads.ads_hot_category",
                "ads.ads_inventory_alert", "ads.ads_promotion_effect",
                "ads.ads_recent_sales"
        };

        for (String table : tables) {
            try {
                Dataset<Row> tableData = spark.table(table)
                        .filter(col("dt").equalTo(processDate));

                long count = tableData.count();
                System.out.println("表 " + table + " 在日期 " + processDate + " 的记录数: " + count);

                if (count == 0) {
                    System.err.println("警告: 表 " + table + " 在日期 " + processDate + " 无数据");
                }

            } catch (Exception e) {
                // Best-effort check: report and continue with the next table.
                System.err.println("检查表 " + table + " 时出错: " + e.getMessage());
            }
        }

        // Spot-check the core KPI table's content, not just its count.
        Dataset<Row> todayKpi = spark.table("ads.ads_today_kpi")
                .filter(col("dt").equalTo(processDate));

        if (todayKpi.count() == 0) {
            System.err.println("警告: 今日核心指标表在日期 " + processDate + " 无数据");
        } else {
            System.out.println("今日核心指标数据样例:");
            todayKpi.limit(5).show();
        }

        System.out.println("数据质量检查完成");
    }

    /**
     * Runs {@link #processAllTables()} for every date in the inclusive
     * range [startDate, endDate], both formatted yyyy-MM-dd. Mutates
     * {@code processDate} as it iterates.
     *
     * @throws java.time.format.DateTimeParseException if either bound is
     *         not a valid ISO-8601 date
     */
    public void processDateRange(String startDate, String endDate) {
        LocalDate start = LocalDate.parse(startDate, DateTimeFormatter.ISO_DATE);
        LocalDate end = LocalDate.parse(endDate, DateTimeFormatter.ISO_DATE);

        LocalDate current = start;
        while (!current.isAfter(end)) {
            this.processDate = current.toString();
            System.out.println("\n=== 开始处理日期: " + processDate + " ===");
            processAllTables();
            current = current.plusDays(1);
        }
    }
}