package net.bw.realtime.jtp.dws.log.job;


import net.bw.realtime.jtp.dws.log.utils.AnalyzerUtil;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.table.annotation.DataTypeHint;
import org.apache.flink.table.annotation.FunctionHint;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.functions.TableFunction;
import org.apache.flink.types.Row;
import org.wltea.analyzer.lucene.IKAnalyzer;

import java.util.List;

/**
 * DWS job: counts search keywords per 1-minute tumbling window and writes the result to ClickHouse.
 *
 * @author liuyawei
 * @date 2025/5/20
 */
public class JtpTrafficSearchKeywordMinuteWindowDwsJob {

    public static void main(String[] args) {

        // 1. Create the table execution environment.
        TableEnvironment tabEnv = getTableEnv();

        // 2. Register the Kafka input table.
        createInputTable(tabEnv);

        // 3. Transform: filter search-page logs, tokenize keywords, aggregate per window.
        Table reportTable = handle(tabEnv);

        // 4. Register the ClickHouse output table.
        createOutputTable(tabEnv);

        // 5. Persist the aggregated result.
        saveToClickHouse(tabEnv, reportTable);

    }

    /**
     * Inserts the windowed report into the ClickHouse sink table, formatting the
     * window boundary timestamps as 'yyyy-MM-dd HH:mm:ss' strings.
     *
     * @param tabEnv      the table environment the sink table is registered in
     * @param reportTable the aggregated result produced by {@link #handle(TableEnvironment)}
     */
    private static void saveToClickHouse(TableEnvironment tabEnv, Table reportTable) {

        // Expose the result Table to SQL via a temporary view.
        tabEnv.createTemporaryView("report_table", reportTable);

        tabEnv.executeSql(
         "INSERT INTO dws_traffic_search_keyword_window_report_clickhouse_sink\n" +
                 "SELECT\n" +
                 "    DATE_FORMAT(window_start_time, 'yyyy-MM-dd HH:mm:ss') AS window_start_time\n" +
                 "    , DATE_FORMAT(window_end_time, 'yyyy-MM-dd HH:mm:ss') AS window_end_time\n" +
                 "    , keyword\n" +
                 "    , keyword_count\n" +
                 "    , ts\n" +
                 "FROM report_table"
        );

    }

    /**
     * Creates the ClickHouse sink table
     * {@code dws_traffic_search_keyword_window_report_clickhouse_sink}.
     *
     * @param tabEnv the table environment to register the sink in
     */
    private static void createOutputTable(TableEnvironment tabEnv) {

        tabEnv.executeSql("CREATE TABLE dws_traffic_search_keyword_window_report_clickhouse_sink (\n" +
                "    `window_start_time` STRING COMMENT '窗口开始日期时间',\n" +
                "    `window_end_time` STRING COMMENT '窗口结束日期时间',\n" +
                "    `keyword` STRING COMMENT '搜索关键词',\n" +
                "    `keyword_count` BIGINT COMMENT '搜索关键词被搜索次数',\n" +
                "    `ts` BIGINT COMMENT '数据产生时间戳'\n" +
                ") WITH (\n" +
                "    'connector' = 'clickhouse',\n" +
                "    'url' = 'jdbc:clickhouse://node103:8123/jtp_log_report',\n" +
                "    'table' = 'dws_traffic_search_keyword_window_report',\n" +
                "    'username' = 'default',\n" +
                "    'password' = '',\n" +
                "    'format' = 'json'\n" +
                ")"
        );
    }

    /**
     * Builds the aggregation pipeline:
     * <ol>
     *   <li>filter page logs where the previous page was the search page and the
     *       clicked item is a keyword;</li>
     *   <li>split each full search phrase into tokens with the IK-analyzer UDTF;</li>
     *   <li>count each token per 1-minute tumbling event-time window.</li>
     * </ol>
     *
     * @param tabEnv the table environment with the Kafka source already registered
     * @return a Table with columns (window_start_time, window_end_time, keyword, keyword_count, ts)
     */
    private static Table handle(TableEnvironment tabEnv) {

        // Keep only page events that carry a search keyword.
        Table keywordTable = tabEnv.sqlQuery(
                "SELECT\n" +
                        " page['item'] AS full_word\n" +
                        " , row_time\n" +
                        " FROM dwd_traffic_page_log_kafka_source\n" +
                        " WHERE page['item'] IS NOT NULL\n" +
                        " AND page['last_page_id'] = 'search'\n" +
                        " AND page['item_type'] = 'keyword'"
        );

        // Register a temporary view for the filtered rows.
        tabEnv.createTemporaryView("keyword_table", keywordTable);

        // Register the IK tokenizer UDTF and explode each phrase into one row per token.
        tabEnv.createTemporarySystemFunction("ik_udtf", IKAnalyzerFunction.class);
        Table wordLog = tabEnv.sqlQuery(
                "SELECT" +
                        " full_word" +
                        ", keyword" +
                        ", row_time" +
                        " FROM keyword_table," +
                        "  LATERAL TABLE(ik_udtf(full_word)) AS T(keyword)"
        );

        // Register a temporary view for the tokenized rows.
        tabEnv.createTemporaryView("word_log", wordLog);

        // Group by a 1-minute tumbling event-time window and count each keyword.
        // `ts` is the wall-clock emit time in epoch milliseconds, not the event time.
        Table table = tabEnv.sqlQuery(
                "SELECT\n" +
                        "  TUMBLE_START(row_time, INTERVAL '1' MINUTES) AS window_start_time,\n" +
                        "  TUMBLE_END(row_time, INTERVAL '1' MINUTES) AS window_end_time,\n" +
                        "  keyword,\n" +
                        "  count(keyword) AS keyword_count,\n" +
                        "  UNIX_TIMESTAMP() * 1000 AS ts\n" +
                        "  FROM word_log\n" +
                        "GROUP BY\n" +
                        "  TUMBLE(row_time, INTERVAL '1' MINUTES),\n" +
                        "  keyword"
        );

        return table;
    }

    /**
     * UDTF wrapping the IK Chinese analyzer: emits one row per token of the input string.
     */
    @FunctionHint(output = @DataTypeHint("ROW<keyword STRING>"))
    public static class IKAnalyzerFunction extends TableFunction<Row>{
        public void eval(String str) throws Exception {
            List<String> list = AnalyzerUtil.ikAnalyzer(str);
            for (String keyword : list) {
                collect(Row.of(keyword));
            }
        }
    }

    /**
     * Creates the Kafka source table {@code dwd_traffic_page_log_kafka_source} with an
     * event-time attribute derived from {@code ts} and a zero-delay watermark.
     *
     * @param tabEnv the table environment to register the source in
     */
    private static void createInputTable(TableEnvironment tabEnv) {

        tabEnv.executeSql("CREATE TABLE dwd_traffic_page_log_kafka_source\n" +
                "(\n" +
                "    common MAP<STRING, STRING> COMMENT '公共环境信息',\n" +
                "    page   MAP<STRING, STRING> COMMENT '页面信息',\n" +
                "    ts     BIGINT,\n" +
                "    row_time AS TO_TIMESTAMP_LTZ(ts, 3),\n" +
                "    WATERMARK FOR row_time AS row_time - INTERVAL '0' MINUTE\n" +
                ") WITH (\n" +
                "    'connector' = 'kafka',\n" +
                "    'topic' = 'dwd-traffic-page-log',\n" +
                "    'properties.bootstrap.servers' = 'node101:9092,node102:9092,node103:9092',\n" +
                "    'properties.group.id' = 'gid_dws_traffic_search_keyword',\n" +
                "    'scan.startup.mode' = 'earliest-offset',\n" +
                "    'format' = 'json',\n" +
                "    'json.fail-on-missing-field' = 'false',\n" +
                "    'json.ignore-parse-errors' = 'true'\n" +
                ")"
        );

    }

    /**
     * Creates a streaming {@link TableEnvironment} (Blink planner) configured for the
     * Asia/Shanghai time zone with a default parallelism of 1.
     *
     * @return the configured table environment
     */
    private static TableEnvironment getTableEnv() {

        // 1. Build a streaming environment on the Blink planner.
        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .useBlinkPlanner()
                .inStreamingMode()
                .build();

        TableEnvironment tabEnv = TableEnvironment.create(settings);

        // 2. Runtime configuration.
        Configuration configuration = tabEnv.getConfig().getConfiguration();
        configuration.setString("table.local-time-zone", "Asia/Shanghai");
        configuration.setString("table.exec.resource.default-parallelism", "1");
        // NOTE(review): a 5 s state TTL looks far shorter than the 1-minute tumbling
        // window used downstream — confirm window state is not expired prematurely.
        configuration.setString("table.exec.state.ttl", "5 s");

        return tabEnv;

    }

}
