package com.lhd.app.dws;

import com.lhd.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

import java.time.Duration;

/**
 * 页面流量分析：避免子查询的优化版本
 */
/**
 * DWS-layer page-traffic analysis job.
 *
 * <p>Reads page logs, action logs and trade facts (order detail, successful
 * payment detail) from Kafka, pre-aggregates each metric family into its own
 * temporary view over 30-second tumbling event-time windows (this avoids the
 * correlated sub-queries of the naive version), joins the views into one wide
 * row per (page_id, window) and writes the Top-10 pages by visitor count to an
 * upsert-kafka result table.
 *
 * <p>Side effects: consumes from four Kafka topics (consumer group
 * {@code dws_page_analysis}), produces to {@code dws_traffic_page_analysis},
 * and prints a retract stream to stdout for debugging.
 */
public class DwsTrafficPageAnalysis {

    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // NOTE(review): 10 s idle-state retention is very short for the
        // non-windowed LEFT JOINs and the global Top-N below, both of which
        // hold keyed state across windows. Confirm state is not evicted before
        // slower views emit their window results; the interval joins themselves
        // are watermark-driven and unaffected by this setting.
        tableEnv.getConfig().setIdleStateRetention(Duration.ofSeconds(10L));

        // 1. Page-log source table. Event time derives from `ts` (epoch millis)
        //    with a 3-second out-of-orderness bound.
        tableEnv.executeSql("" +
                "CREATE TABLE dwd_traffic_page_log ( " +
                "    `common` MAP<STRING, STRING>, " +
                "    `page` MAP<STRING, STRING>, " +
                "    `ts` BIGINT, " +
                "    `row_time` AS TO_TIMESTAMP_LTZ(ts, 3), " +
                "    WATERMARK FOR `row_time` AS `row_time` - INTERVAL '3' SECOND " +
                ")" + MyKafkaUtil.getKafkaDDL("dwd_traffic_page_log", "dws_page_analysis"));

        // 2. Action-log source table (same watermark strategy as the page log).
        tableEnv.executeSql("" +
                "CREATE TABLE dwd_traffic_action_log ( " +
                "    `common` MAP<STRING, STRING>, " +
                "    `action` MAP<STRING, STRING>, " +
                "    `page` MAP<STRING, STRING>, " +
                "    `ts` BIGINT, " +
                "    `row_time` AS TO_TIMESTAMP_LTZ(ts, 3), " +
                "    WATERMARK FOR `row_time` AS `row_time` - INTERVAL '3' SECOND " +
                ")" + MyKafkaUtil.getKafkaDDL("dwd_traffic_action_log", "dws_page_analysis"));

        // 3. Order-detail fact table. Event time is parsed from the string
        //    `create_time` (seconds precision, hence the * 1000).
        tableEnv.executeSql("" +
                "CREATE TABLE dwd_trade_order_detail ( " +
                "    `id` STRING, " +
                "    `order_id` STRING, " +
                "    `user_id` STRING, " +
                "    `sku_id` STRING, " +
                "    `source_id` STRING, " +
                "    `source_type_id` STRING, " +
                "    `create_time` STRING, " +
                "    `split_total_amount` DECIMAL(16,2), " +
                "    `order_time` AS TO_TIMESTAMP_LTZ(UNIX_TIMESTAMP(create_time) * 1000, 3), " +
                "    WATERMARK FOR `order_time` AS `order_time` - INTERVAL '3' SECOND, " +
                "    `row_op_ts` TIMESTAMP_LTZ(3) " +
                ")" + MyKafkaUtil.getKafkaDDL("dwd_trade_order_detail", "dws_page_analysis"));

        // 4. Successful-payment detail fact table; event time from `callback_time`.
        tableEnv.executeSql("" +
                "CREATE TABLE dwd_trade_pay_detail_suc ( " +
                "    `order_detail_id` STRING, " +
                "    `order_id` STRING, " +
                "    `user_id` STRING, " +
                "    `sku_id` STRING, " +
                "    `source_id` STRING, " +
                "    `source_type_id` STRING, " +
                "    `callback_time` STRING, " +
                "    `split_payment_amount` DECIMAL(16,2), " +
                "    `pay_time` AS TO_TIMESTAMP_LTZ(UNIX_TIMESTAMP(callback_time) * 1000, 3), " +
                "    WATERMARK FOR `pay_time` AS `pay_time` - INTERVAL '3' SECOND, " +
                "    `row_op_ts` TIMESTAMP_LTZ(3) " +
                ")" + MyKafkaUtil.getKafkaDDL("dwd_trade_pay_detail_suc", "dws_page_analysis"));

        // 5. Per-metric temporary views, each a 30-second tumbling-window
        //    aggregation keyed by page_id.

        // 5a. Base traffic metrics: distinct devices (mid) and distinct users
        //     (uid) that viewed each page.
        tableEnv.executeSql("" +
                "CREATE TEMPORARY VIEW page_base_metrics AS " +
                "SELECT " +
                "    page['page_id'] AS page_id, " +
                "    TUMBLE_START(row_time, INTERVAL '30' SECOND) AS window_start, " +
                "    TUMBLE_END(row_time, INTERVAL '30' SECOND) AS window_end, " +
                "    COUNT(DISTINCT common['mid']) AS visitor_count, " +
                "    COUNT(DISTINCT common['uid']) AS click_user_count " +
                "FROM dwd_traffic_page_log " +
                "WHERE page['page_id'] IS NOT NULL AND page['page_id'] <> '' " +
                "GROUP BY TUMBLE(row_time, INTERVAL '30' SECOND), page['page_id']");

        // 5b. Action metrics per page. NOTE(review): this view is currently not
        //     consumed by the final query (its former LEFT JOIN selected no
        //     columns and was removed as dead state); kept for future use —
        //     confirm whether `click_user_count` was meant to come from here.
        tableEnv.executeSql("" +
                "CREATE TEMPORARY VIEW page_action_metrics AS " +
                "SELECT " +
                "    page['page_id'] AS page_id, " +
                "    TUMBLE_START(row_time, INTERVAL '30' SECOND) AS window_start, " +
                "    TUMBLE_END(row_time, INTERVAL '30' SECOND) AS window_end, " +
                "    COUNT(DISTINCT common['uid']) AS action_user_count, " +
                "    COUNT(*) AS action_count " +
                "FROM dwd_traffic_action_log " +
                "WHERE page['page_id'] IS NOT NULL AND page['page_id'] <> '' " +
                "GROUP BY TUMBLE(row_time, INTERVAL '30' SECOND), page['page_id']");

        // 5c. Orders attributed to a page: interval join of page views and order
        //     details on user id within +/- 1 day, restricted to promotion
        //     source types 2402/2403/2404.
        tableEnv.executeSql("" +
                "CREATE TEMPORARY VIEW page_guide_orders AS " +
                "SELECT " +
                "    pl.page['page_id'] AS page_id, " +
                "    TUMBLE_START(pl.row_time, INTERVAL '30' SECOND) AS window_start, " +
                "    TUMBLE_END(pl.row_time, INTERVAL '30' SECOND) AS window_end, " +
                "    COUNT(DISTINCT od.user_id) AS guide_order_user_count, " +
                "    COUNT(DISTINCT od.order_id) AS guide_order_count, " +
                "    SUM(od.split_total_amount) AS guide_order_amount " +
                "FROM dwd_traffic_page_log pl " +
                "JOIN dwd_trade_order_detail od " +
                "    ON pl.common['uid'] = od.user_id " +
                "    AND od.order_time BETWEEN pl.row_time - INTERVAL '1' day AND pl.row_time + INTERVAL '1' day " +
                "WHERE pl.page['page_id'] IS NOT NULL " +
                "    AND pl.page['page_id'] <> '' " +
                "    AND od.source_type_id in ('2402','2403','2404') " +
                "GROUP BY TUMBLE(pl.row_time, INTERVAL '30' SECOND), pl.page['page_id']");

        // 5d. Payments attributed to a page: page view -> order (within 1 hour)
        //     -> successful payment (within 2 hours of the order). The tightened
        //     interval bounds keep interval-join state small.
        tableEnv.executeSql("" +
                "CREATE TEMPORARY VIEW page_guide_payments AS " +
                "SELECT " +
                "    pl.page['page_id'] AS page_id, " +
                "    TUMBLE_START(pl.row_time, INTERVAL '30' SECOND) AS window_start, " +
                "    TUMBLE_END(pl.row_time, INTERVAL '30' SECOND) AS window_end, " +
                "    COUNT(DISTINCT ps.user_id) AS guide_payment_user_count, " +
                "    COUNT(DISTINCT ps.order_id) AS guide_payment_count, " +
                "    SUM(ps.split_payment_amount) AS guide_payment_amount " +
                "FROM dwd_traffic_page_log pl " +
                "JOIN dwd_trade_order_detail od " +
                "    ON pl.common['uid'] = od.user_id " +
                "    AND od.order_time BETWEEN pl.row_time AND pl.row_time + INTERVAL '1' HOUR " +
                "JOIN dwd_trade_pay_detail_suc ps " +
                "    ON od.order_id = ps.order_id " +
                "    AND ps.pay_time BETWEEN od.order_time AND od.order_time + INTERVAL '2' HOUR " +
                "WHERE pl.page['page_id'] IS NOT NULL " +
                "    AND pl.page['page_id'] <> '' " +
                "    AND od.source_type_id in ('2402','2403','2404') " +
                "    AND ps.source_type_id in ('2402','2403','2404') " +
                "GROUP BY TUMBLE(pl.row_time, INTERVAL '30' SECOND), pl.page['page_id']");

        // 6. Join the pre-aggregated views into one wide row per (page, window)
        //    and keep the Top-10 pages (ORDER BY ... LIMIT compiles to a
        //    streaming Top-N). The previous LEFT JOIN to page_action_metrics
        //    selected none of its columns; since that view has at most one row
        //    per (page_id, window_start), the join could never change the
        //    result and was removed to cut join state.
        Table resultTable = tableEnv.sqlQuery("" +
                "SELECT " +
                "    bm.page_id, " +
                "    bm.window_start, " +
                "    bm.window_end, " +
                "    bm.visitor_count, " +
                "    COALESCE(bm.click_user_count, 0) AS click_user_count, " +
                "    COALESCE(go.guide_order_user_count, 0) AS guide_order_user_count, " +
                "    COALESCE(gp.guide_payment_user_count, 0) AS guide_payment_user_count, " +
                "    COALESCE(gp.guide_payment_amount, 0) AS guide_payment_amount, " +
                "    CURRENT_TIMESTAMP AS analysis_time, " +
                "    (bm.visitor_count + COALESCE(bm.click_user_count, 0) + COALESCE(go.guide_order_user_count, 0) + COALESCE(gp.guide_payment_user_count, 0)) AS total_engagement " +
                "FROM page_base_metrics bm " +
                "LEFT JOIN page_guide_orders go " +
                "    ON bm.page_id = go.page_id AND bm.window_start = go.window_start " +
                "LEFT JOIN page_guide_payments gp " +
                "    ON bm.page_id = gp.page_id AND bm.window_start = gp.window_start " +
                "WHERE bm.visitor_count > 0 " +
                "ORDER BY bm.visitor_count DESC, total_engagement DESC " +
                "LIMIT 10");

        // 7. Debug sink: print the retract stream to stdout.
        tableEnv.toRetractStream(resultTable, Row.class).print(">>>>");

        // 8. DWS result table (upsert-kafka keyed by page_id + window_start, so
        //    Top-N retractions are folded into upserts/deletes).
        tableEnv.executeSql("" +
                "CREATE TABLE dws_traffic_page_analysis ( " +
                "    `page_id` STRING, " +
                "    `window_start` TIMESTAMP(3), " +
                "    `window_end` TIMESTAMP(3), " +
                "    `visitor_count` BIGINT, " +
                "    `click_user_count` BIGINT, " +
                "    `guide_order_user_count` BIGINT, " +
                "    `guide_payment_user_count` BIGINT, " +
                "    `guide_payment_amount` DECIMAL(16,2), " +
                "    `analysis_time` TIMESTAMP(3), " +
                "    `total_engagement` BIGINT, " +
                "    PRIMARY KEY (page_id, window_start) NOT ENFORCED " +
                ")" + MyKafkaUtil.getUpsertKafkaDDL("dws_traffic_page_analysis"));

        // 9. Write to Kafka. Register the result under an explicit name instead
        //    of relying on Table.toString()'s implicit catalog registration.
        tableEnv.createTemporaryView("result_table", resultTable);
        tableEnv.executeSql("INSERT INTO dws_traffic_page_analysis SELECT * FROM result_table");

        // 10. Execute the DataStream part of the job (the print sink in step 7);
        //     the executeSql INSERT above is submitted independently.
        env.execute("DwsTrafficPageAnalysis");
    }
}