package com.bw.realtime.app.dws;

import com.bw.realtime.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;

/**
 * Page-traffic analysis: computes key metrics for the 10 pages with the most
 * distinct visitors (optimized version that avoids sub-queries).
 *
 * <p>Pipeline: reads four DWD-layer Kafka topics (page log, action log, order
 * detail, successful payment detail), derives three per-page aggregates as
 * temporary views, joins them into a single Top-N result, and upserts the
 * result back to Kafka while also printing it to the console for testing.
 */
public class DwsTrafficPageAnalysis {

    public static void main(String[] args) throws Exception {

        // TODO 1. Set up the execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // State TTL: idle per-key state is dropped after 5 seconds.
        // NOTE(review): 5s is very aggressive for the COUNT(DISTINCT)
        // accumulators below — an inactive page's counts will be forgotten and
        // restart from zero. Confirm this short retention is intentional.
        tableEnv.getConfig().setIdleStateRetention(Duration.ofSeconds(5L));

        // TODO 2. Page-log source table (event time = ts, 5s watermark delay).
        tableEnv.executeSql("" +
                "CREATE TABLE dwd_traffic_page_log ( " +
                "    `common` MAP<STRING, STRING>, " +
                "    `page` MAP<STRING, STRING>, " +
                "    `ts` BIGINT, " +
                "    `row_time` AS TO_TIMESTAMP_LTZ(ts, 3), " +
                "    WATERMARK FOR `row_time` AS `row_time` - INTERVAL '5' SECOND " +
                ")" + MyKafkaUtil.getKafkaDDL("dwd_traffic_page_log", "page_analysis_001"));

        // TODO 3. Action-log source table.
        // NOTE(review): this table is never referenced by any query below —
        // confirm whether it is needed or can be removed.
        tableEnv.executeSql("" +
                "CREATE TABLE dwd_traffic_action_log ( " +
                "    `common` MAP<STRING, STRING>, " +
                "    `action` MAP<STRING, STRING>, " +
                "    `page` MAP<STRING, STRING>, " +
                "    `ts` BIGINT, " +
                "    `row_time` AS TO_TIMESTAMP_LTZ(ts, 3), " +
                "    WATERMARK FOR `row_time` AS `row_time` - INTERVAL '5' SECOND " +
                ")" + MyKafkaUtil.getKafkaDDL("dwd_traffic_action_log", "action_analysis_001"));

        // TODO 4. Order-detail source table (no watermark: used in regular joins).
        tableEnv.executeSql("" +
                "CREATE TABLE dwd_trade_order_detail ( " +
                "    `id` STRING, " +
                "    `order_id` STRING, " +
                "    `user_id` STRING, " +
                "    `sku_id` STRING, " +
                "    `source_id` STRING, " +
                "    `source_type_id` STRING, " +
                "    `create_time` STRING, " +
                "    `split_total_amount` DECIMAL(16,2), " +
                "    `row_op_ts` TIMESTAMP_LTZ(3) " +
                ")" + MyKafkaUtil.getKafkaDDL("dwd_trade_order_detail", "order_analysis_001"));

        // TODO 5. Successful-payment-detail source table.
        tableEnv.executeSql("" +
                "CREATE TABLE dwd_trade_pay_detail_suc ( " +
                "    `order_detail_id` STRING, " +
                "    `order_id` STRING, " +
                "    `user_id` STRING, " +
                "    `sku_id` STRING, " +
                "    `source_id` STRING, " +
                "    `source_type_id` STRING, " +
                "    `callback_time` STRING, " +
                "    `split_payment_amount` DECIMAL(16,2), " +
                "    `row_op_ts` TIMESTAMP_LTZ(3) " +
                ")" + MyKafkaUtil.getKafkaDDL("dwd_trade_pay_detail_suc", "pay_analysis_001"));

        // TODO 6. Compute each metric in its own temporary view.
        // 6.1 Per-page visitor statistics: distinct devices (mid) and users (uid).
        tableEnv.executeSql("" +
                "CREATE TEMPORARY VIEW page_base_stats AS " +
                "SELECT " +
                "    page['page_id'] AS page_id, " +
                "    COUNT(DISTINCT common['mid']) AS visitor_count, " +
                "    COUNT(DISTINCT common['uid']) AS click_user_count " +
                "FROM dwd_traffic_page_log " +
                "WHERE page['page_id'] IS NOT NULL " +
                "GROUP BY page['page_id']");

        // 6.2 Per-page order attribution: users who ordered within 1 hour of a
        // page view (source_id '2401' excluded).
        // NOTE(review): this is a regular (unbounded) join filtered by a time
        // predicate, not an interval join — with the 5s idle-state TTL above,
        // join state may expire before matching rows arrive. Verify.
        tableEnv.executeSql("" +
                "CREATE TEMPORARY VIEW page_guide_order_stats AS " +
                "SELECT " +
                "    pl.page['page_id'] AS page_id, " +
                "    COUNT(DISTINCT od.user_id) AS guide_order_user_count " +
                "FROM dwd_traffic_page_log pl " +
                "JOIN dwd_trade_order_detail od ON pl.common['uid'] = od.user_id " +
                "WHERE od.source_id <> '2401' " +
                "AND TO_TIMESTAMP(od.create_time) >= pl.row_time " +
                "AND TO_TIMESTAMP(od.create_time) <= pl.row_time + INTERVAL '1' HOUR " +
                "GROUP BY pl.page['page_id']");

        // 6.3 Per-page payment attribution: paying users and paid amount within
        // 24 hours of a page view (source_id '2401' excluded).
        tableEnv.executeSql("" +
                "CREATE TEMPORARY VIEW page_guide_payment_stats AS " +
                "SELECT " +
                "    pl.page['page_id'] AS page_id, " +
                "    COUNT(DISTINCT ps.user_id) AS guide_payment_user_count, " +
                "    SUM(ps.split_payment_amount) AS guide_payment_amount " +
                "FROM dwd_traffic_page_log pl " +
                "JOIN dwd_trade_order_detail od ON pl.common['uid'] = od.user_id " +
                "JOIN dwd_trade_pay_detail_suc ps ON od.order_id = ps.order_id " +
                "WHERE ps.source_id <> '2401' " +
                "AND TO_TIMESTAMP(ps.callback_time) >= pl.row_time " +
                "AND TO_TIMESTAMP(ps.callback_time) <= pl.row_time + INTERVAL '24' HOUR " +
                "GROUP BY pl.page['page_id']");

        // TODO 7. Join all page-level metrics and keep the Top-10 by visitors.
        // ORDER BY ... LIMIT on a non-time attribute runs as a streaming Top-N.
        // NOTE(review): CURRENT_TIMESTAMP may be TIMESTAMP_LTZ(3) on newer
        // Flink versions while the sink declares TIMESTAMP(3) — confirm the
        // types line up for the deployed Flink version.
        Table preciseResultTable = tableEnv.sqlQuery("" +
                "SELECT " +
                "    pbs.page_id, " +
                "    pbs.visitor_count, " +
                "    pbs.click_user_count, " +
                "    COALESCE(pgos.guide_order_user_count, 0) AS guide_order_user_count, " +
                "    COALESCE(pgps.guide_payment_user_count, 0) AS guide_payment_user_count, " +
                "    COALESCE(pgps.guide_payment_amount, 0) AS guide_payment_amount, " +
                "    CURRENT_TIMESTAMP AS analysis_time " +
                "FROM page_base_stats pbs " +
                "LEFT JOIN page_guide_order_stats pgos ON pbs.page_id = pgos.page_id " +
                "LEFT JOIN page_guide_payment_stats pgps ON pbs.page_id = pgps.page_id " +
                "WHERE pbs.page_id IS NOT NULL " +
                "ORDER BY pbs.visitor_count DESC " +
                "LIMIT 10");

        // TODO 8. Result sink: upsert-kafka keyed by page_id, so retractions
        // from the Top-N are folded into per-key upserts.
        tableEnv.executeSql("" +
                "CREATE TABLE page_analysis_result ( " +
                "    `page_id` STRING, " +
                "    `visitor_count` BIGINT, " +
                "    `click_user_count` BIGINT, " +
                "    `guide_order_user_count` BIGINT, " +
                "    `guide_payment_user_count` BIGINT, " +
                "    `guide_payment_amount` DECIMAL(16,2), " +
                "    `analysis_time` TIMESTAMP(3), " +
                "    PRIMARY KEY (page_id) NOT ENFORCED " +
                ")" + MyKafkaUtil.getUpsertKafkaDDL("dwd_traffic_page_analysis_result"));

        // TODO 9. Write the result to Kafka. executeInsert() is the documented
        // API for inserting a Table; the previous string concatenation of the
        // Table object relied on Table.toString() registering an anonymous
        // table as a side effect. This submits its own Flink job.
        preciseResultTable.executeInsert("page_analysis_result");

        // TODO 10. Print the result to the console (for testing).
        // NOTE(review): toRetractStream is deprecated in favor of
        // toChangelogStream; kept as-is to avoid changing runtime behavior.
        tableEnv.toRetractStream(preciseResultTable, org.apache.flink.types.Row.class)
                .print("Page Analysis Result");

        // TODO 11. Launch the DataStream job that backs the console print.
        // Job name fixed to match the class (DWS layer, not DWD).
        env.execute("DwsTrafficPageAnalysis");
    }
}