package com.bbmall.ads;

import com.bbmall.SparkSessionUtil;
import org.apache.spark.sql.SparkSession;

/**
 * Sales-analysis ADS table extraction job.
 *
 * <p>Aggregates daily sales facts from {@code dws.dws_sales_day} into
 * {@code ads.ads_sales_analysis}, partitioned by {@code dt}. Supports a full
 * rebuild over a date range ({@code INSERT OVERWRITE}) or an incremental
 * append for a single business date ({@code INSERT INTO}).
 */
public class DwsToAdsSalesAnalysis {

    /** yyyy-MM-dd; every date is validated against this before being spliced into SQL text. */
    private static final String DATE_REGEX = "\\d{4}-\\d{2}-\\d{2}";

    /**
     * Entry point.
     *
     * @param args args[0] = is_full ("true"/"false");
     *             args[1] = "start,end" date range (full mode) or a single biz date (incremental)
     */
    public static void main(String[] args) {
        if (args.length < 2) {
            printUsageAndExit();
        }
        boolean isFull = Boolean.parseBoolean(args[0]);
        String dateParam = args[1];

        String whereClause;
        try {
            whereClause = buildWhereClause(isFull, dateParam);
        } catch (IllegalArgumentException e) {
            System.err.println(e.getMessage());
            printUsageAndExit();
            return; // unreachable (printUsageAndExit exits); satisfies definite assignment
        }

        String sql = buildSql(isFull, whereClause);

        SparkSession spark = SparkSessionUtil.getSparkSession("DWS_TO_ADS_SALES_ANALYSIS");
        try {
            spark.sql(sql);
            System.out.println("销售分析ADS表" + (isFull ? "全量" : "增量") + "处理完成！");
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(1);
        } finally {
            SparkSessionUtil.close();
        }
    }

    /** Prints the CLI usage message to stderr and terminates with a non-zero status. */
    private static void printUsageAndExit() {
        System.err.println("参数错误！用法：");
        System.err.println("全量处理：<is_full=true> <date_range=start_date,end_date>（如：true 2023-01-01,2025-10-17）");
        System.err.println("增量处理：<is_full=false> <biz_date=yyyy-MM-dd>（如：false 2025-10-18）");
        System.exit(1);
    }

    /**
     * Builds the {@code dt} partition filter for the chosen mode.
     *
     * <p>Each date is validated against {@link #DATE_REGEX} before concatenation:
     * the values come straight from the command line, so unchecked splicing is an
     * injection risk — and the original code indexed {@code dates[1]} without
     * checking, throwing ArrayIndexOutOfBoundsException when the full-mode
     * argument contained no comma.
     *
     * @param isFull    true = date-range filter, false = single-date filter
     * @param dateParam "start,end" (full) or "yyyy-MM-dd" (incremental)
     * @return a SQL predicate on the {@code dt} partition column
     * @throws IllegalArgumentException if the date parameter is malformed
     */
    private static String buildWhereClause(boolean isFull, String dateParam) {
        if (isFull) {
            String[] dates = dateParam.split(",");
            if (dates.length != 2 || !dates[0].matches(DATE_REGEX) || !dates[1].matches(DATE_REGEX)) {
                throw new IllegalArgumentException("Invalid date_range (expected start,end in yyyy-MM-dd): " + dateParam);
            }
            return "dt BETWEEN '" + dates[0] + "' AND '" + dates[1] + "'";
        }
        if (!dateParam.matches(DATE_REGEX)) {
            throw new IllegalArgumentException("Invalid biz_date (expected yyyy-MM-dd): " + dateParam);
        }
        return "dt = '" + dateParam + "'";
    }

    /**
     * Builds the aggregation SQL: full mode overwrites the target partitions,
     * incremental mode appends to them.
     *
     * @param isFull      selects INSERT OVERWRITE (full) vs INSERT INTO (incremental)
     * @param whereClause validated {@code dt} filter produced by {@link #buildWhereClause}
     * @return a single executable Spark SQL statement (no trailing terminator)
     */
    private static String buildSql(boolean isFull, String whereClause) {
        // NOTE(review): GROUP BY includes sale_date/product_category/promotion_type,
        // so each dt yields multiple rows whose SUM(...) columns are per-group
        // subtotals, not true daily totals — confirm this matches the intended
        // ADS grain before relying on the "daily total" column names.
        return (isFull ? "INSERT OVERWRITE" : "INSERT INTO") + " TABLE ads.ads_sales_analysis PARTITION (dt) " +
                "SELECT " +
                "  SUM(total_sales_amount) AS total_sales_amount, " +  // daily sales total
                "  SUM(order_count) AS order_count, " +                // daily order count
                "  SUM(customer_flow) AS customer_flow, " +            // daily customer flow
                "  SUM(total_sales_amount)/SUM(order_count) AS avg_order_amount, " +  // average ticket (NULL when order_count sums to 0)
                // hot categories: JSON {"category": sales_amount}
                "  to_json(collect_list(map(product_category, category_sales_amount))) AS hot_category_sales, " +
                // weekly sales trend: JSON array [{"date": ..., "amount": ...}]
                "  to_json(collect_list(map(sale_date, total_sales_amount))) AS weekly_sales_trend, " +
                // inventory warning list: JSON array [{"product": ..., "stock": ...}]
                "  to_json(collect_list(map('预警商品', product_category, '当前库存', inventory_warning_count))) AS inventory_warning_list, " +
                // promotion effect: JSON {"promotion_type": sales_amount}
                "  to_json(collect_list(map(promotion_type, promotion_sales_amount))) AS promotion_effect, " +
                // sales stream: aggregated from DWS detail (sample logic; could join order table)
                "  to_json(collect_list(map('订单数', order_count, '金额', total_sales_amount, '时间', sale_date))) AS realtime_sales_stream, " +
                "  CURRENT_TIMESTAMP() AS update_time, " +
                "  dt " +  // partition column = date
                "FROM dws.dws_sales_day " +
                "WHERE " + whereClause + " " +
                "GROUP BY dt, sale_date, product_category, promotion_type";
                // trailing ';' removed — SparkSession.sql() parses a single statement
                // and a statement terminator causes a ParseException on common versions
    }
}