package com.bw.day0709;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Created with IntelliJ IDEA.
 *
 * @Author: mjc
 * @Date: 2025/07/04/10:51
 * @Description:
 */
public class gd3_1_dwd {

    /**
     * DWD-layer cleansing job.
     *
     * <p>Reads raw page-log events from a Kafka source topic (DDL options supplied by
     * {@code xm.MyKafkaUtil.getKafkaDDL}), projects the business fields, and writes the
     * cleansed records to the Kafka topic {@code gd3_dwd_l}. The final
     * {@code select ... print()} is a debugging aid that blocks and streams the sink
     * topic to stdout for the lifetime of the unbounded query.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if job submission fails
     */
    public static void main(String[] args) throws Exception {
        // 1. Create the Flink streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 for local debugging; tune per workload in production.
        env.setParallelism(1);

        // 2. Create the Table API environment bridged onto the stream environment.
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // 3. Kafka source table: raw page-log data.
        //    FIX: Flink SQL interval units are singular keywords — "INTERVAL '10' DAY";
        //    the original "INTERVAL '10' days" does not parse.
        //    NOTE(review): a 10-day watermark delay is unusually large — confirm intent.
        tableEnv.executeSql("create table page(\n" +
                "`shop_id` string,          -- 店铺ID\n" +
                "`uid` string,              -- 用户ID\n" +
                "`behavior` string,         -- 用户行为类型\n" +
                "`gz_id` string,            -- 关注ID\n" +
                "`zbj_id` string,           -- 主播ID\n" +
                "`dsp_id` string,           -- DSP广告ID\n" +
                "`tw_id` string,            -- 推文ID\n" +
                "`3D_id` string,            -- 3D内容ID\n" +
                "`tsl_id` string,           -- 特殊标签ID\n" +
                "`is_new` string,            -- 是否新用户标识\n" +
                "`page_id` string,          -- 页面ID\n" +
                "`during_time` STRING,      -- 页面停留时间\n" +
                "`item` string,             -- 商品项\n" +
                "`item_type` string,        -- 商品类型\n" +
                "`ts` bigint,               -- 时间戳(毫秒)\n" +
                "time_ltz AS TO_TIMESTAMP_LTZ(ts, 3),  -- 将时间戳转为TIMESTAMP_LTZ类型\n" +
                "WATERMARK FOR time_ltz AS time_ltz - INTERVAL '10' DAY  -- 水印设置(10天延迟)\n" +
                ")" +
                xm.MyKafkaUtil.getKafkaDDL("topic_gd3_dwd_page","gd3_page_log"));  // Kafka connector options from project util class

        // 4. Project the business fields from the source table.
        Table table = tableEnv.sqlQuery("select \n" +
                "`shop_id`,      -- 店铺ID\n" +
                "`uid`,         -- 用户ID\n" +
                "`behavior`,     -- 用户行为\n" +
                "`gz_id`,       -- 关注ID\n" +
                "`zbj_id`,      -- 主播ID\n" +
                "`dsp_id`,      -- DSP广告ID\n" +
                "`tw_id`,       -- 推文ID\n" +
                "`3D_id`,       -- 3D内容ID\n" +
                "`tsl_id`,      -- 特殊标签ID\n" +
                "`is_new`,      -- 是否新用户\n" +
                "`page_id`,     -- 页面ID\n" +
                "`during_time`, -- 停留时间\n" +
                "`item`,        -- 商品项\n" +
                "`item_type`,   -- 商品类型\n" +
                "ts             -- 原始时间戳\n" +
                "from page");

        // 5. Register a temporary view so the projection can be referenced by SQL.
        tableEnv.createTemporaryView("ddwd", table);

        // 6. Kafka target table for the cleansed data.
        //    FIX: no trailing ';' — TableEnvironment#executeSql takes a single
        //    statement without a terminator.
        //    NOTE(review): this table is used both as sink (step 8) and as source
        //    (step 9), so both sink.* and scan.*/group.id options are present.
        String createTableSql = "CREATE TABLE gd3_dwd_l (\n" +
                "    shop_id STRING,               -- 店铺ID\n" +
                "    uid STRING,                   -- 用户ID\n" +
                "    behavior STRING,              -- 用户行为\n" +
                "    gz_id STRING,                 -- 关注ID\n" +
                "    zbj_id STRING,                -- 主播ID\n" +
                "    dsp_id STRING,                -- DSP广告ID\n" +
                "    tw_id STRING,                 -- 推文ID\n" +
                "    `3D_id` STRING,              -- 3D内容ID(使用反引号转义)\n" +
                "    tsl_id STRING,                -- 特殊标签ID\n" +
                "    is_new STRING,                -- 是否新用户\n" +
                "    page_id STRING,               -- 页面ID\n" +
                "    during_time STRING,           -- 停留时间\n" +
                "    item STRING,                  -- 商品项\n" +
                "    item_type STRING,             -- 商品类型\n" +
                "    ts BIGINT                     -- 时间戳\n" +
                ") WITH (\n" +
                "    'connector' = 'kafka',        -- 使用Kafka连接器\n" +
                "    'topic' = 'gd3_dwd_l',       -- 目标Kafka主题\n" +
                "    'properties.bootstrap.servers' = 'hadoop102:9092',  -- Kafka集群地址\n" +
                "    'format' = 'json',            -- 数据格式为JSON\n" +
                "    'properties.group.id' = 'dwd-group',  -- 消费者组ID\n" +
                "    'scan.startup.mode' = 'latest-offset',  -- 从最新偏移量开始消费\n" +
                "    'sink.partitioner' = 'round-robin',  -- 轮询分区策略\n" +
                "    'properties.acks' = '1',      -- 生产者确认机制(leader确认即可)\n" +
                "    'properties.retries' = '3',   -- 失败重试次数\n" +
                "    'properties.linger.ms' = '5',  -- 消息延迟发送时间(毫秒)\n" +
                "    'properties.batch.size' = '16384'  -- 批处理大小(字节)\n" +
                ")";

        // 7. Create the target table.
        tableEnv.executeSql(createTableSql);

        // 8. Submit the continuous INSERT job (executeSql submits it asynchronously).
        tableEnv.executeSql("insert into gd3_dwd_l select * from ddwd");

        // 9. Debug aid: stream the sink topic to stdout. This call BLOCKS for the
        //    lifetime of the unbounded query — remove it for unattended deployment.
        tableEnv.executeSql("select * from gd3_dwd_l").print();

        // FIX: no env.execute() here. All jobs are submitted via executeSql(); with
        // no DataStream operators defined, env.execute() would throw
        // "No operators defined in streaming topology".
    }
}
