package net.bwie.realtime.jtp.log.job;

import net.bwie.realtime.jtp.log.utils.AnalyzerUtil;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.table.annotation.DataTypeHint;
import org.apache.flink.table.annotation.FunctionHint;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.functions.TableFunction;
import org.apache.flink.types.Row;

import java.util.List;

/**
 * DWS job: aggregates search keywords from the DWD traffic page log into
 * per-minute counts and writes the windowed report to ClickHouse.
 *
 * @author LiLi
 * @version 1.0
 * @since 2025/05/20 19:50:54
 */
public class JtpTrafficSearchKeywordMinuteWindowDwsJob {

    /** Entry-point/utility class: prevent instantiation. */
    private JtpTrafficSearchKeywordMinuteWindowDwsJob() {
    }

    /**
     * Job entry point. Pipeline:
     * Kafka (dwd_traffic_page_log) -> filter search events -> IK word segmentation (UDTF)
     * -> 1-minute tumbling window count -> ClickHouse sink.
     */
    public static void main(String[] args) {
        // 1. Table execution environment
        TableEnvironment tabEnv = getTableEnv();

        // 2. Input table: mapped to the Kafka topic
        createInputTable(tabEnv);

        // 3. Processing: filter, segment keywords, window aggregation
        Table reportTable = handle(tabEnv);

        // 4. Output table: mapped to the ClickHouse table
        createOutputTable(tabEnv);

        // 5. Persist the windowed report
        saveToClickHouse(tabEnv, reportTable);
    }

    /**
     * Inserts the windowed keyword report into the ClickHouse sink table.
     * Window timestamps are formatted as strings to match the sink schema.
     *
     * @param tabEnv      table environment the sink table is registered in
     * @param reportTable result of {@link #handle(TableEnvironment)}
     */
    private static void saveToClickHouse(TableEnvironment tabEnv, Table reportTable) {
        // Register the result as a view so it can be referenced from SQL.
        tabEnv.createTemporaryView("report_table", reportTable);

        tabEnv.executeSql("INSERT INTO dws_traffic_search_keyword_window_report_clickhouse_sink\n" +
                "SELECT\n" +
                "    DATE_FORMAT(window_start_time, 'yyyy-MM-dd HH:mm:ss') AS window_start_time,\n" +
                "    DATE_FORMAT(window_end_time, 'yyyy-MM-dd HH:mm:ss') AS window_end_time,\n" +
                "    keyword,\n" +
                "    keyword_count,\n" +
                "    ts\n" +
                "FROM report_table");
    }

    /**
     * Creates the sink table mapped to the ClickHouse report table
     * {@code jtp_log_report.dws_traffic_search_keyword_window_report}.
     */
    private static void createOutputTable(TableEnvironment tabEnv) {
        tabEnv.executeSql(
                     "CREATE TABLE dws_traffic_search_keyword_window_report_clickhouse_sink (\n" +
                        "    `window_start_time` STRING,\n" +
                        "    `window_end_time` STRING,\n" +
                        "    `keyword` STRING,\n" +
                        "    `keyword_count` BIGINT,\n" +
                        "    `ts`  BIGINT \n" +
                        ") WITH (\n" +
                        "    'connector' = 'clickhouse',\n" +
                        "    'url' = 'jdbc:clickhouse://node103:8123/jtp_log_report',\n" +
                        "    'table' = 'dws_traffic_search_keyword_window_report',\n" +
                        "    'username' = 'default',\n" +
                        "    'password' = '',\n" +
                        "    'format' = 'json'\n" +
                        ")"
        );
    }

    /**
     * Core processing:
     * <ol>
     *   <li>filter page-log rows that represent a keyword search
     *       (item present, last page was 'search', item type is 'keyword');</li>
     *   <li>split the full search phrase into keywords with the IK-analyzer UDTF;</li>
     *   <li>count each keyword per 1-minute tumbling event-time window.</li>
     * </ol>
     *
     * @param tabEnv table environment where the Kafka source table already exists
     * @return windowed report table (window_start_time, window_end_time, keyword,
     *         keyword_count, ts = processing wall-clock millis)
     */
    private static Table handle(TableEnvironment tabEnv) {
        // a. Keep only search events; carry the event-time attribute along.
        Table searchlogtable = tabEnv.sqlQuery(
                "SELECT page['item'] AS fullword, row_time " +
                        "FROM dwd_traffic_page_log_kafka_source " +
                        "WHERE " +
                        "page['item'] IS NOT NULL " +
                        "AND page['last_page_id'] = 'search' " +
                        "AND page['item_type'] = 'keyword'"
        );
        tabEnv.createTemporaryView("search_log_table", searchlogtable);

        // b. Register the IK-analyzer UDTF used for Chinese word segmentation.
        tabEnv.createTemporarySystemFunction("ik_analyzer_function", IkAnalyzerFunction.class);

        // c. Explode each search phrase into one row per keyword (cross join
        //    with the lateral table produced by the UDTF).
        Table splitTable = tabEnv.sqlQuery(
                "SELECT fullword, keyword, row_time " +
                        "FROM search_log_table, LATERAL TABLE(ik_analyzer_function(fullword)) AS T(keyword)"
        );
        tabEnv.createTemporaryView("split_table", splitTable);

        // d. 1-minute tumbling window count per keyword; ts records when the
        //    row was produced (processing wall-clock, epoch millis).
        return tabEnv.sqlQuery(
                "SELECT " +
                        "  TUMBLE_START(row_time, INTERVAL '1' MINUTES) AS window_start_time, " +
                        "  TUMBLE_END(row_time, INTERVAL '1' MINUTES) AS window_end_time, " +
                        "  keyword, " +
                        "  COUNT(keyword) AS keyword_count, " +
                        "  UNIX_TIMESTAMP() * 1000 AS ts " +
                        "  FROM split_table " +
                        "  GROUP BY TUMBLE(row_time, INTERVAL '1' MINUTES), keyword"
        );
    }

    /**
     * Creates the source table mapped to the Kafka topic
     * {@code dwd_traffic_page_log}, deriving the event-time attribute
     * {@code row_time} from the millisecond epoch field {@code ts} with a
     * 5-second watermark delay.
     */
    private static void createInputTable(TableEnvironment tabEnv) {
        tabEnv.executeSql(
                "CREATE TABLE dwd_traffic_page_log_kafka_source\n" +
                        "(\n" +
                        "    `common` MAP<STRING, STRING> COMMENT '公共环境信息',\n" +
                        "    `page`   MAP<STRING, STRING> COMMENT '页面信息',\n" +
                        "    `ts`     BIGINT,\n" +
                        "    row_time AS TO_TIMESTAMP(FROM_UNIXTIME(ts / 1000, 'yyyy-MM-dd HH:mm:ss.SSS')),\n" +
                        "    WATERMARK FOR row_time AS row_time - INTERVAL '5' SECOND\n" +
                        ") WITH (\n" +
                        "    'connector' = 'kafka',\n" +
                        "    'topic' = 'dwd_traffic_page_log',\n" +
                        "    'properties.bootstrap.servers' = 'node101:9092,node102:9092,node103:9092',\n" +
                        "    'properties.group.id' = 'gid_dws_traffic_search_keyword',\n" +
                        "    'scan.startup.mode' = 'earliest-offset',\n" +
                        "    'format' = 'json',\n" +
                        "    'json.fail-on-missing-field' = 'false',\n" +
                        "    'json.ignore-parse-errors' = 'true'\n" +
                        ")\n" );
    }

    /**
     * Builds the streaming table environment with job-wide settings:
     * Asia/Shanghai session time zone, parallelism 1, 5-second state TTL.
     *
     * @return configured {@link TableEnvironment}
     */
    private static TableEnvironment getTableEnv() {
        // 1. Environment settings (Blink planner, streaming mode)
        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .inStreamingMode()
                .useBlinkPlanner()
                .build();
        TableEnvironment tabEnv = TableEnvironment.create(settings);
        // 2. Job-level configuration
        Configuration configuration = tabEnv.getConfig().getConfiguration();
        configuration.setString("table.local-time-zone", "Asia/Shanghai");
        configuration.setString("table.exec.resource.default-parallelism", "1");
        configuration.setString("table.exec.state.ttl", "5 s");
        // 3. Return the configured environment
        return tabEnv;
    }

    /**
     * UDTF that splits a search phrase into keywords with the IKAnalyzer
     * segmenter, emitting one row per keyword.
     * https://nightlies.apache.org/flink/flink-docs-release-1.13/docs/dev/table/functions/udfs/#table-functions
     */
    @FunctionHint(output = @DataTypeHint("ROW<keyword STRING>"))
    public static class IkAnalyzerFunction extends TableFunction<Row> {
        public void eval(String fullWord) throws Exception {
            // Defensive: a null input or a null result would otherwise throw an
            // NPE inside the operator and fail the whole job.
            if (fullWord == null) {
                return;
            }
            List<String> list = AnalyzerUtil.ikAnalyzer(fullWord);
            if (list == null) {
                return;
            }
            // Emit one output row per segmented keyword.
            for (String keyword : list) {
                collect(Row.of(keyword));
            }
        }
    }
}

