package net.bwie.realtime.warehouse.DWD;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;

/**
 * @BelongsProject: realtime-project-10zlq
 * @BelongsPackage: net.bwie.realtime.warehouse.DWD
 * @Author: zhangleqing
 * @CreateTime: 2025-08-30  14:53
 * @Description: dwd_before_retreating (pre-refund warning summary table).
 * Source tables: order_base (order base info), logistics_info (logistics node info),
 * district (district dimension, used as a lookup table for sender-side SLA attributes).
 * @Version: 1.0
 */
public class DwdBeforeRetreating {

    public static void main(String[] args) {
        // 1. Build the streaming TableEnvironment (timezone, parallelism, state TTL).
        TableEnvironment tableEnv = getTableEnv();

        // 2. Register the source tables: two Kafka ODS topics and the Doris district dimension.
        readTable(tableEnv);

        // 3. Enrich orders with their logistics nodes and the sender district's SLA attributes.
        Table resultTable = handle(tableEnv);

        // 4. Register the Kafka sink table for the DWD layer.
        createOutputTable(tableEnv);

        // 5. Stream the enriched rows into the sink.
        saveToSink(tableEnv, resultTable);
    }

    /**
     * Inserts every column of {@code resultTable} into the {@code dwd_before_retreating}
     * Kafka sink table via {@code SELECT *}, so the column order of the SELECT in
     * {@link #handle(TableEnvironment)} must stay aligned with the sink DDL in
     * {@link #createOutputTable(TableEnvironment)} (16 columns in both, same order).
     *
     * @param tableEnv    the table environment that owns both the view and the sink table
     * @param resultTable the enriched result produced by {@link #handle(TableEnvironment)}
     */
    private static void saveToSink(TableEnvironment tableEnv, Table resultTable) {
        tableEnv.createTemporaryView("retreating", resultTable);
        tableEnv.executeSql(
                "insert into dwd_before_retreating " +
                        "select * " +
                        "from retreating"
        );
    }

    /**
     * Registers the append-only Kafka sink table {@code dwd_before_retreating}
     * (topic {@code dwd_before_retreating}, JSON format).
     *
     * <p>NOTE(review): the query in {@link #handle(TableEnvironment)} contains a regular
     * stream-stream LEFT JOIN, which can emit update/delete changes; a plain 'kafka' + 'json'
     * sink only accepts inserts. If the job fails with "doesn't support consuming update
     * changes", switch this sink to 'upsert-kafka' with a PRIMARY KEY, or constrain the join
     * to an interval join — confirm against the actual runtime behavior.
     */
    private static void createOutputTable(TableEnvironment tableEnv) {
        tableEnv.executeSql(
                "CREATE TABLE dwd_before_retreating (\n" +
                        "    order_id STRING,\n" +
                        "    product_id STRING,\n" +
                        "    pay_time TIMESTAMP(3),\n" +
                        "    promise_ship_time TIMESTAMP(3),\n" +
                        "\n" +
                        "    refund_id STRING,\n" +
                        "    sender_area_id STRING,\n" +
                        "    receiver_area_id STRING,\n" +
                        "    logistics_start_time TIMESTAMP(3),\n" +
                        "    logistics_end_time TIMESTAMP(3),\n" +
                        "    expected_start_time TIMESTAMP(3),\n" +
                        "    expected_end_time TIMESTAMP(3),\n" +
                        "    logistics_node STRING,\n" +
                        "\n" +
                        "    area_id STRING,\n" +
                        "    sender_same_province_timeout INT,\n" +
                        "    sender_cross_province_timeout INT,\n" +
                        "    sender_special_area_flag INT\n" +
                        ") WITH (\n" +
                        "    'connector' = 'kafka',\n" +
                        "    'topic' = 'dwd_before_retreating',\n" +
                        "    'properties.bootstrap.servers' = 'node101:9092',\n" +
                        "    'format' = 'json'\n" +
                        ")"
        );
    }

    /**
     * Builds the enrichment query:
     * <pre>
     *   ods_order_base ob
     *     LEFT JOIN ods_logistics_info li            -- regular stream-stream join
     *       ON ob.order_id = li.order_id
     *     LEFT JOIN ods_district FOR SYSTEM_TIME     -- processing-time lookup join into Doris
     *       AS OF li.proctime AS sd
     *       ON li.sender_area_id = sd.area_id
     * </pre>
     * Only the sender-side district attributes are selected; receiver-side enrichment
     * (a second lookup join on {@code receiver_area_id}) is intentionally not implemented yet.
     * Rows without an order_id or pay_time are filtered out.
     *
     * @param tableEnv the table environment holding the three registered source tables
     * @return the 16-column result table, also registered as the view "DwdBeforeRetreating"
     */
    private static Table handle(TableEnvironment tableEnv) {
        Table resultTable = tableEnv.sqlQuery(
                "SELECT\n" +
                        "    ob.order_id,\n" +
                        "    ob.product_id,\n" +
                        "    ob.pay_time,\n" +
                        "    ob.promise_ship_time,\n" +
                        "\n" +
                        "    li.refund_id,\n" +
                        "    li.sender_area_id,\n" +
                        "    li.receiver_area_id,\n" +
                        "    li.logistics_start_time,\n" +
                        "    li.logistics_end_time,\n" +
                        "    li.expected_start_time,\n" +
                        "    li.expected_end_time,\n" +
                        "    li.logistics_node,\n" +
                        "\n" +
                        "    sd.area_id as area_id,\n" +
                        "    sd.same_province_timeout AS sender_same_province_timeout,\n" +
                        "    sd.cross_province_timeout AS sender_cross_province_timeout,\n" +
                        "    sd.special_area_flag AS sender_special_area_flag\n" +
                        "FROM ods_order_base ob\n" +
                        "LEFT JOIN ods_logistics_info li\n" +
                        "    ON ob.order_id = li.order_id AND ob.order_id IS NOT NULL AND li.logistics_id IS NOT NULL\n" +
                        "LEFT JOIN ods_district FOR SYSTEM_TIME AS OF li.proctime AS sd\n" +
                        "    ON li.sender_area_id = sd.area_id AND li.sender_area_id IS NOT NULL\n" +
                        "where ob.order_id IS NOT NULL AND ob.pay_time IS NOT NULL "
        );
        // Register the result as a view so downstream/ad-hoc queries can reference it.
        tableEnv.createTemporaryView("DwdBeforeRetreating", resultTable);
        return resultTable;
    }

    /**
     * Registers the three source tables:
     * <ul>
     *   <li>{@code ods_order_base}     — Kafka, order base info, watermark on pay_time</li>
     *   <li>{@code ods_logistics_info} — Kafka, logistics nodes, watermark on logistics_start_time</li>
     *   <li>{@code ods_district}       — Doris dimension table, used via lookup join (cached)</li>
     * </ul>
     * Both Kafka tables expose a {@code proctime} column; {@code li.proctime} drives the
     * {@code FOR SYSTEM_TIME AS OF} lookup join in {@link #handle(TableEnvironment)}.
     */
    private static void readTable(TableEnvironment tableEnv) {
        // order_base: order base information stream.
        tableEnv.executeSql(
                "create table ods_order_base (\n" +
                        "    order_id STRING,\n" +
                        "    user_id STRING,\n" +
                        "    shop_id STRING,\n" +
                        "    product_id STRING,\n" +
                        "    pay_time TIMESTAMP(3),\n" +
                        "    promise_ship_time TIMESTAMP(3),\n" +
                        "    click_ship_time TIMESTAMP(3),\n" +
                        "    order_amount DECIMAL(10,2),\n" +
                        "    order_status STRING,\n" +
                        "    proctime AS PROCTIME(),\n" +
                        "    WATERMARK FOR pay_time AS pay_time - INTERVAL '5' SECOND\n" +
                        ") WITH (\n" +
                        "    'connector' = 'kafka',\n" +
                        "    'topic' = 'ods-order-base-Log',\n" +
                        "    'properties.bootstrap.servers' = 'node101:9092',\n" +
                        "    'properties.group.id' = 'ods-order-base-Log',\n" +
                        "    'scan.startup.mode' = 'earliest-offset',\n" +
                        "    'format' = 'json'\n" +
                        ")"
        );

        // logistics_info: logistics node information stream.
        tableEnv.executeSql(
                "create table ods_logistics_info (\n" +
                        "    logistics_id STRING,\n" +
                        "    refund_id STRING,\n" +
                        "    order_id STRING,\n" +
                        "    sender_area_id STRING,\n" +
                        "    receiver_area_id STRING,\n" +
                        "    logistics_node STRING,\n" +
                        "    logistics_start_time TIMESTAMP(3),\n" +
                        "    logistics_end_time TIMESTAMP(3),\n" +
                        "    expected_start_time TIMESTAMP(3),\n" +
                        "    expected_end_time TIMESTAMP(3),\n" +
                        "    node_status STRING,\n" +
                        "    proctime AS PROCTIME(),\n" +
                        "    WATERMARK FOR logistics_start_time AS logistics_start_time - INTERVAL '5' SECOND\n" +
                        ") WITH (\n" +
                        "    'connector' = 'kafka',\n" +
                        "    'topic' = 'ods-logistics-info-log',\n" +
                        "    'properties.bootstrap.servers' = 'node101:9092',\n" +
                        "    'properties.group.id' = 'ods-logistics-info-log',\n" +
                        "    'scan.startup.mode' = 'earliest-offset',\n" +
                        "    'format' = 'json'\n" +
                        ")"
        );

        // district: Doris-backed dimension table (queried per key by the lookup join).
        tableEnv.executeSql(
                "CREATE TABLE ods_district (\n" +
                        "    area_id STRING,\n" +
                        "    area_name STRING,\n" +
                        "    area_level STRING,\n" +
                        "    same_province_timeout INT,\n" +
                        "    cross_province_timeout INT,\n" +
                        "    special_area_flag INT\n" +
                        ") WITH (\n" +
                        "    'connector' = 'doris',\n" +
                        // Doris FE HTTP port (used for BE data transfer).
                        "    'fenodes' = 'node102:8030',\n" +
                        // Doris FE JDBC port (default 9030).
                        "    'jdbc-url' = 'jdbc:mysql://node102:9030',\n" +
                        "    'table.identifier' = 'transactions_ods.district',\n" +
                        "    'username' = 'root',\n" +
                        "    'password' = '123456',\n" +
                        // Retry failed dimension lookups up to 3 times.
                        "    'lookup.max-retries' = '3',\n" +
                        "    -- 移除不支持的'read.mode'参数（当前版本默认批处理模式）\n" +
                        "    'lookup.cache.max-rows' = '10000',  -- 支持的缓存参数\n" +
                        "    'lookup.cache.ttl' = '3600000'      -- 支持的缓存过期时间\n" +
                        ")"
        );
    }

    /**
     * Creates a streaming-mode {@link TableEnvironment} configured with the
     * Asia/Shanghai local time zone, parallelism 1, and a 25-hour state TTL
     * (90000 s) so join state survives a 24-hour correlation window.
     *
     * @return the configured table environment
     */
    public static TableEnvironment getTableEnv() {
        // 1. Environment settings: unbounded streaming mode.
        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .inStreamingMode()
                .build();
        TableEnvironment tabEnv = TableEnvironment.create(settings);
        // 2. Job-level configuration.
        Configuration configuration = tabEnv.getConfig().getConfiguration();
        configuration.setString("table.local-time-zone", "Asia/Shanghai");
        configuration.setString("table.exec.resource.default-parallelism", "1");
        // State TTL of 25 hours (3600*25 = 90000 s) to cover the 24-hour join window.
        configuration.setString("table.exec.state.ttl", "90000 s");
        // Checkpointing is intentionally left disabled here; enable
        // 'execution.checkpointing.interval' for fault-tolerant delivery.
        // 3. Return the configured environment.
        return tabEnv;
    }
}
