package com.bbmall.ads;

import com.bbmall.SparkSessionUtil;
import org.apache.spark.sql.SparkSession;

/**
 * 3. Store-analysis ADS table extraction (DWS -&gt; ADS).
 *
 * <p>Usage:
 * <ul>
 *   <li>Full load:        {@code <is_full=true>  <date_range=start_date,end_date>}</li>
 *   <li>Incremental load: {@code <is_full=false> <biz_date=yyyy-MM-dd>}</li>
 * </ul>
 */
public class DwsToAdsStoreAnalysis {

    /** Strict yyyy-MM-dd shape; dates are concatenated into SQL, so reject anything else. */
    private static final String DATE_PATTERN = "\\d{4}-\\d{2}-\\d{2}";

    public static void main(String[] args) {
        // ---- Parse and validate arguments ----
        if (args.length < 2) {
            printUsageAndExit();
        }
        boolean isFull = Boolean.parseBoolean(args[0]);
        String dateParam = args[1];

        // ---- Build the dt predicate (validated: the values are spliced into SQL text) ----
        String whereClause;
        if (isFull) {
            String[] dates = dateParam.split(",");
            // FIX: original indexed dates[1] unchecked -> AIOOBE on a malformed date_range.
            if (dates.length < 2 || !isValidDate(dates[0]) || !isValidDate(dates[1])) {
                printUsageAndExit();
            }
            whereClause = "dt BETWEEN '" + dates[0] + "' AND '" + dates[1] + "'";
        } else {
            if (!isValidDate(dateParam)) {
                printUsageAndExit();
            }
            whereClause = "dt = '" + dateParam + "'";
        }

        // ---- Build the SQL ----
        // FIX: the original grouped by (dt, store_code), which made every day-level aggregate
        // degenerate — COUNT(DISTINCT store_code) was always 1 per group and each
        // collect_list() rollup held a single store. Grouping by dt alone yields one row per
        // day, which is what the selected columns describe.
        // FIX: trailing ';' removed — SparkSession.sql() executes a single statement.
        String sql = (isFull ? "INSERT OVERWRITE" : "INSERT INTO")
                + " TABLE ads.ads_store_analysis PARTITION (dt) "
                + "SELECT "
                + "  COUNT(DISTINCT store_code) AS total_store_count, "  // total number of stores
                + "  SUM(CASE WHEN store_sales_amount > 0 THEN 1 ELSE 0 END) AS operating_store_count, "  // stores with sales
                + "  SUM(store_customer_flow) AS total_customer_flow, "  // total customer traffic
                + "  AVG(store_sales_amount) AS avg_store_sales, "  // average sales per store
                // Store sales ranking: JSON array of {store_code: sales_amount} maps
                + "  to_json(collect_list(map(store_code, store_sales_amount))) AS top_store_sales, "
                // Customer-flow distribution: JSON array of {store_code: customer_flow} maps
                + "  to_json(collect_list(map(store_code, store_customer_flow))) AS store_flow_distribution, "
                // Low-efficiency store alert: stores below the 50000 sales threshold
                + "  to_json(collect_list(map(store_code, store_sales_amount)) "
                + "    FILTER (WHERE store_sales_amount < 50000)) AS low_efficiency_stores, "
                // Sales-per-area (floor efficiency): JSON array of {store_code: avg_sales_per_area}
                + "  to_json(collect_list(map(store_code, store_avg_sales_per_area))) AS store_efficiency, "
                // Inventory turnover: JSON array of {store_code: turnover_rate}
                + "  to_json(collect_list(map(store_code, inventory_turnover_rate))) AS inventory_turnover, "
                + "  CURRENT_TIMESTAMP() AS update_time, "
                + "  dt "  // dynamic-partition column must come last in the SELECT list
                + "FROM dws.dws_store_day "
                + "WHERE " + whereClause + " "
                + "GROUP BY dt";

        // ---- Execute ----
        SparkSession spark = SparkSessionUtil.getSparkSession("DWS_TO_ADS_STORE_ANALYSIS");
        try {
            spark.sql(sql);
            System.out.println("门店分析ADS表" + (isFull ? "全量" : "增量") + "处理完成！");
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(1);
        } finally {
            // Always release the session, even on failure.
            SparkSessionUtil.close();
        }
    }

    /**
     * Returns {@code true} when {@code s} has the yyyy-MM-dd shape.
     * Shape check only — calendar validity (e.g. month 13) is left to the engine.
     */
    private static boolean isValidDate(String s) {
        return s != null && s.matches(DATE_PATTERN);
    }

    /** Prints the job's usage text (original wording preserved) and exits non-zero. */
    private static void printUsageAndExit() {
        System.err.println("参数错误！用法：");
        System.err.println("全量处理：<is_full=true> <date_range=start_date,end_date>");
        System.err.println("增量处理：<is_full=false> <biz_date=yyyy-MM-dd>");
        System.exit(1);
    }
}