package net.bwie.realtime.jtp.dws.douyin.log.job;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;

public class DouYinMinuteWindowDwsJob {

    /**
     * DWS streaming job: reads DouYin live-room event and trade streams from
     * Kafka and reports per-SKU click/exposure/order metrics.
     *
     * NOTE(review): the class name suggests minute-level windows, but the
     * query in {@link #handle} is a plain (non-windowed) global aggregation —
     * confirm whether a TUMBLE window was intended.
     */
    public static void main(String[] args) {
        // 1. Table execution environment (streaming mode).
        TableEnvironment tabEnv = getTableEnv();
        // 2. Input tables: map Kafka topics onto Flink SQL tables.
        createInputTable(tabEnv);
        // 3. Processing: run the per-SKU metrics query. handle() prints the
        //    continuous result via execute().print(), which blocks here.
        Table reportTable = handle(tabEnv);
    }

    /**
     * Joins the live-event log with order details and computes, per SKU:
     * click count, exposure count, finished-order count, finished-order
     * amount, click-through rate and order conversion rate. Prints the
     * continuous query result and returns it as a {@link Table}.
     *
     * Bug fixes versus the previous version:
     * <ul>
     *   <li>click_cnt used {@code COUNT(IF(cond,1,0))}: COUNT counts every
     *       non-null value (the 0s as well as the 1s), so click_cnt always
     *       equaled exposure_cnt. Replaced by
     *       {@code SUM(CASE WHEN ... THEN 1 ELSE 0 END)}.</li>
     *   <li>Both ratios were BIGINT/BIGINT integer divisions that truncate
     *       to 0; operands are now cast to DOUBLE, and the conversion rate
     *       guards against click_cnt = 0 (division by zero).</li>
     *   <li>Non-ASCII column aliases are backtick-quoted, and the trailing
     *       ';' was removed — sqlQuery() parses a single statement and
     *       rejects a trailing semicolon.</li>
     * </ul>
     *
     * NOTE(review): exposure_cnt = COUNT(*) over the LEFT JOIN result, so a
     * SKU with N events and M matching orders yields N*M counted rows —
     * confirm whether exposures should be counted before the join.
     *
     * @param tabEnv table environment with the input tables registered
     * @return the report table (one row per sku_id)
     */
    private static Table handle(TableEnvironment tabEnv) {
        Table table = tabEnv.sqlQuery(
                "WITH ProductMetrics AS (\n" +
                "    SELECT\n" +
                "        el.sku_id,\n" +
                "        SUM(CASE WHEN el.event_type = '进入直播间' THEN 1 ELSE 0 END) AS click_cnt,\n" +
                "        COUNT(*) AS exposure_cnt,\n" +
                "        COUNT(CASE WHEN od.status = 'finished' THEN 1 END) AS order_cnt,\n" +
                "        SUM(CASE WHEN od.status = 'finished' THEN od.order_amount ELSE 0 END) AS order_amount\n" +
                "    FROM dws_live_event_log AS el\n" +
                "    LEFT JOIN dws_trade_order_detail AS od\n" +
                "        ON el.sku_id = od.sku_id\n" +
                "    GROUP BY el.sku_id\n" +
                ")\n" +
                "SELECT\n" +
                "    pm.sku_id AS `商品ID`,\n" +
                "    ROUND(CAST(pm.click_cnt AS DOUBLE) / pm.exposure_cnt, 4) AS `商品点击率,\n".replace(",\n", "`,\n") +
                "    pm.order_cnt AS `成交订单数`,\n" +
                "    pm.order_amount AS `成交金额`,\n" +
                "    CASE WHEN pm.click_cnt = 0 THEN 0.0\n" +
                "         ELSE ROUND(CAST(pm.order_cnt AS DOUBLE) / pm.click_cnt * 100, 4)\n" +
                "    END AS `成交转化率`\n" +
                "FROM ProductMetrics AS pm");
        // Continuous streaming result; print() blocks until the job stops.
        table.execute().print();
        return table;
    }

    /**
     * Registers the Kafka-backed DWD input tables (live events, orders,
     * payments, refunds, fan relations) in the given table environment.
     *
     * Fixes versus the previous version:
     * <ul>
     *   <li>event_time_raw multiplied an already-millisecond epoch (sample
     *       value 1724467200000 per the original comment) by 1000, producing
     *       timestamps ~1000x in the future; the factor is removed.</li>
     *   <li>the trailing ';' inside each DDL string is removed — executeSql
     *       takes exactly one statement and some Flink versions reject the
     *       semicolon.</li>
     * </ul>
     */
    private static void createInputTable(TableEnvironment tabEnv) {
        // Live-room event log (exposure/click events).
        tabEnv.executeSql("CREATE TABLE dws_live_event_log (\n" +
                "    event_id BIGINT,\n" +
                "    live_room_id BIGINT,\n" +
                "    anchor_id BIGINT,\n" +
                "    user_id BIGINT,\n" +
                "    sku_id BIGINT,\n" +
                "    event_type STRING,\n" +
                "    -- event_time: epoch MILLISECONDS (e.g. 1724467200000); name must match JSON field\n" +
                "    event_time BIGINT,\n" +
                "    -- fixed: removed erroneous '* 1000' (only multiply if the source were SECONDS)\n" +
                "    event_time_raw AS TO_TIMESTAMP_LTZ(event_time, 3),  -- precision 3 = milliseconds\n" +
                "    duration_sec BIGINT,\n" +
                "    platform STRING,\n" +
                "    region STRING,\n" +
                "    source_type STRING,\n" +
                "    session_id STRING\n" +
                ") WITH (\n" +
                "    'connector' = 'kafka',\n" +
                "    'topic' = 'dwd_live_room_event_log',\n" +
                "    'properties.group.id' = 'dws_live_event_log_group_id',\n" +
                "    'properties.bootstrap.servers' = 'node101:9092,node102:9092,node103:9092',\n" +
                "    'format' = 'json',\n" +
                "    'json.ignore-parse-errors' = 'true',\n" +
                "    'scan.startup.mode' = 'earliest-offset'\n" +
                ")"
        );

        // Trade order detail stream (joined against the event log in handle()).
        tabEnv.executeSql("CREATE TABLE dws_trade_order_detail (\n" +
                "    order_id BIGINT,\n" +
                "    event_id BIGINT,\n" +
                "    live_room_id BIGINT,\n" +
                "    anchor_id BIGINT,\n" +
                "    user_id BIGINT,\n" +
                "    sku_id BIGINT,\n" +
                "    order_time TIMESTAMP(3),\n" +
                "    order_cnt BIGINT,\n" +
                "    order_amount DECIMAL(10, 2),\n" +
                "    status STRING,\n" +
                "    WATERMARK FOR order_time AS order_time - INTERVAL '5' SECOND \n" +
                ") WITH (\n" +
                "    'connector' = 'kafka',\n" +
                "    'topic' = 'dwd_trade_order_detail', -- 请替换为实际的Kafka主题名称\n" +
                "    'properties.group.id' = 'dws_trade_order_detail_group_id',\n" +
                "    'properties.bootstrap.servers' = 'node101:9092,node102:9092,node103:9092', -- 请替换为实际的Kafka服务器地址\n" +
                "    'format' = 'json',\n" +
                "    'json.ignore-parse-errors' = 'true',\n" +
                "    'scan.startup.mode' = 'earliest-offset'\n" +
                ")");

        // Payment detail stream (registered but not yet queried in this job).
        tabEnv.executeSql("CREATE TABLE dws_trade_pay_detail (\n" +
                "    pay_id BIGINT,\n" +
                "    order_id BIGINT,\n" +
                "    anchor_id BIGINT,\n" +
                "    user_id BIGINT,\n" +
                "    pay_time TIMESTAMP(3),\n" +
                "    pay_amount DECIMAL(10, 2),\n" +
                "    pay_status STRING,\n" +
                "    WATERMARK FOR pay_time AS pay_time - INTERVAL '5' SECOND \n" +
                ") WITH (\n" +
                "    'connector' = 'kafka',\n" +
                "    'topic' = 'dwd_trade_pay_detail', -- 请替换为实际的Kafka主题名称\n" +
                "    'properties.group.id' = 'dws_trade_pay_detail_group_id',\n" +
                "    'properties.bootstrap.servers' = 'node101:9092,node102:9092,node103:9092',\n" +
                "    'format' = 'json',\n" +
                "    'json.ignore-parse-errors' = 'true',\n" +
                "    'scan.startup.mode' = 'earliest-offset'\n" +
                ")");

        // Refund detail stream (registered but not yet queried in this job).
        tabEnv.executeSql("CREATE TABLE dws_trade_refund_detail (\n" +
                "    refund_id BIGINT,          -- 退款ID\n" +
                "    order_id BIGINT,           -- 关联的订单ID\n" +
                "    pay_id BIGINT,             -- 关联的支付ID\n" +
                "    anchor_id BIGINT,          -- 主播ID\n" +
                "    user_id BIGINT,            -- 用户ID\n" +
                "    refund_time TIMESTAMP(3),\n" +
                "    refund_amount DECIMAL(10,2),-- 退款金额（保留2位小数）\n" +
                "    refund_type STRING,        -- 退款类型（如 \"退货退款\"、\"仅退款\"）\n" +
                "    refund_status STRING,      -- 退款状态（如 \"success\"、\"processing\"、\"failed\"）\n" +
                "    WATERMARK FOR refund_time AS refund_time - INTERVAL '5' SECOND\n" +
                ") WITH (\n" +
                "    'connector' = 'kafka',\n" +
                "    'topic' = 'dwd_trade_refund_detail',\n" +
                "    'properties.bootstrap.servers' = 'node101:9092,node102:9092,node103:9092',\n" +
                "    'properties.group.id' = 'dws_trade_refund_detail_group',\n" +
                "    'format' = 'json',\n" +
                "    'json.ignore-parse-errors' = 'true',\n" +
                "    'json.fail-on-missing-field' = 'false',\n" +
                "    'scan.startup.mode' = 'earliest-offset'\n" +
                ")");

        // Fan (follow/unfollow) relation stream (registered but not yet queried).
        tabEnv.executeSql("CREATE TABLE dws_user_fan_relation (\n" +
                "    relation_id BIGINT,          -- 关系唯一ID\n" +
                "    anchor_id BIGINT,            -- 主播ID\n" +
                "    user_id BIGINT,              -- 用户ID\n" +
                "    change_time TIMESTAMP(3),    -- 关系变化时间\n" +
                "    is_new STRING,               -- 是否新粉丝（\"yes\"/\"no\"）\n" +
                "    is_not_fan STRING,           -- 是否非粉丝（\"yes\"/\"no\"）\n" +
                "    WATERMARK FOR change_time AS change_time - INTERVAL '5' SECOND\n" +
                ") WITH (\n" +
                "    'connector' = 'kafka',\n" +
                "    'topic' = 'dwd_user_fan_relation',\n" +
                "    'properties.bootstrap.servers' = 'node101:9092,node102:9092,node103:9092',\n" +
                "    'properties.group.id' = 'dws_user_fan_relation_group',\n" +
                "    'format' = 'json',\n" +
                "    'json.ignore-parse-errors' = 'true',\n" +
                "    'json.fail-on-missing-field' = 'false',\n" +
                "    'scan.startup.mode' = 'earliest-offset'\n" +
                ")");
    }

    /**
     * Creates a streaming {@link TableEnvironment} with the job's shared
     * settings: Asia/Shanghai session time zone, parallelism 1, 5 s state
     * TTL and 30 s checkpoint interval.
     *
     * NOTE(review): a 5 s state TTL means the unbounded join/aggregation in
     * handle() forgets rows after 5 s of inactivity, which can silently
     * corrupt the aggregates — confirm this TTL is intentional.
     */
    private static TableEnvironment getTableEnv() {
        // 1. Environment settings: unbounded (streaming) execution.
        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .inStreamingMode()
                .build();
        TableEnvironment tabEnv = TableEnvironment.create(settings);
        // 2. Job-level configuration overrides.
        Configuration configuration = tabEnv.getConfig().getConfiguration();
        configuration.setString("table.local-time-zone", "Asia/Shanghai");
        configuration.setString("table.exec.resource.default-parallelism", "1");
        configuration.setString("table.exec.state.ttl", "5 s");
        configuration.setString("execution.checkpointing.interval", "30 s");
        return tabEnv;
    }

}
