package net.bwie.realtime.jtp.dws.log.job;


import net.bwie.realtime.jtp.dws.log.utils.AnalyzerUtil;
import org.apache.flink.configuration.Configuration;

import org.apache.flink.table.annotation.DataTypeHint;
import org.apache.flink.table.annotation.FunctionHint;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.functions.TableFunction;
import org.apache.flink.types.Row;

import java.util.List;


/**
 * @BelongsProject: realtime-project-10zlq
 * @BelongsPackage: net.bwie.realtime.jtp.dws.log.job
 * @Author: zhangleqing
 * @CreateTime: 2025-08-19  15:52
 * @Description: Real-time statistics of search keywords. Search phrases are tokenized with the IK analyzer (via a UDTF) and aggregated per tumbling window using Flink SQL.
 * @Version: 1.0
 */
public class JtpTrafficSearchKeywordMinuteWindowDwsJob {

    public static void main(String[] args) {
        // 1. Table execution environment.
        TableEnvironment tabEnv = getTableEnv();

        // 2. Input table: maps to the Kafka topic carrying DWD page logs.
        createInputTable(tabEnv);
        //tabEnv.executeSql("select * from dwd_traffic_page_log_kafka_source limit 10").print();

        // 3. Processing: filter search logs, tokenize keywords, aggregate per window.
        Table reportTable = handle(tabEnv);

        // 4. Output table: maps to the Doris report table (must be created in Doris beforehand).
        createOutputTable(tabEnv);

        // 5. Insert the aggregated result into the Doris sink.
        saveToSink(tabEnv, reportTable);
    }


    /**
     * Custom UDTF for Flink SQL: splits a full search phrase into keywords using the
     * IK analyzer ({@link AnalyzerUtil#ikAnalyzer}), emitting one row per token.
     * Output schema is declared via {@code @FunctionHint} as {@code Row<keyword STRING>}.
     * https://nightlies.apache.org/flink/flink-docs-release-1.17/docs/dev/table/functions/udfs/#table-functions
     */
    @FunctionHint(output = @DataTypeHint("Row<keyword STRING>"))
    public static class IkAnalyzerFunction extends TableFunction<Row> {
        public void eval(String fullWord) throws Exception {
            // Defensive guard: the SQL filter already excludes NULL items, but a
            // null/blank phrase carries no tokens — emit nothing rather than risk
            // an NPE inside the analyzer.
            if (fullWord == null || fullWord.trim().isEmpty()) {
                return;
            }
            // Tokenize the full search phrase (Chinese word segmentation).
            List<String> list = AnalyzerUtil.ikAnalyzer(fullWord);
            // Emit one output row per extracted keyword.
            for (String keyword : list) {
                collect(Row.of(keyword));
            }
        }
    }


    /**
     * Builds the streaming {@link TableEnvironment} and applies job-level configuration:
     * local time zone, default parallelism, state TTL, and checkpoint interval.
     */
    private static TableEnvironment getTableEnv() {
        // 1. Environment settings: streaming mode.
        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .inStreamingMode()
                .build();
        TableEnvironment tabEnv = TableEnvironment.create(settings);
        // 2. Runtime configuration.
        Configuration configuration = tabEnv.getConfig().getConfiguration();
        configuration.setString("table.local-time-zone", "Asia/Shanghai");
        configuration.setString("table.exec.resource.default-parallelism", "1");
        // NOTE(review): 5 s is a very aggressive TTL; window-TVF state is cleaned by
        // watermarks rather than TTL, so this mainly affects other stateful operators —
        // confirm it is intentional.
        configuration.setString("table.exec.state.ttl", "5 s");
        configuration.setString("execution.checkpointing.interval", "5 s");
        // 3. Return the configured environment.
        return tabEnv;
    }


    /**
     * Input table DDL: creates a Flink SQL table mapped to the Kafka topic
     * {@code dwd-traffic-page-log}. {@code row_time} is derived from the epoch-millis
     * {@code ts} column and declared as the watermark column (0-minute lateness).
     */
    private static void createInputTable(TableEnvironment tabEnv) {
        tabEnv.executeSql(
             "CREATE TABLE dwd_traffic_page_log_kafka_source\n" +
                     "(\n" +
                     "    `common` MAP<STRING, STRING> COMMENT '公共环境信息',\n" +
                     "    `page`   MAP<STRING, STRING> COMMENT '页面信息',\n" +
                     "    `ts`     BIGINT,\n" +
                     "    row_time AS TO_TIMESTAMP(FROM_UNIXTIME(ts / 1000, 'yyyy-MM-dd HH:mm:ss.SSS')),\n" +
                     "    WATERMARK FOR row_time AS row_time - INTERVAL '0' MINUTE\n" +
                     ") WITH (\n" +
                     "    'connector' = 'kafka',\n" +
                     "    'topic' = 'dwd-traffic-page-log',\n" +
                     "    'properties.bootstrap.servers' = 'node101:9092,node102:9092,node103:9092',\n" +
                     "    'properties.group.id' = 'gid_dws_traffic_search_keyword',\n" +
                     "    'scan.startup.mode' = 'earliest-offset',\n" +
                     "    'format' = 'json',\n" +
                     "    'json.fail-on-missing-field' = 'false',\n" +
                     "    'json.ignore-parse-errors' = 'true'\n" +
                     ")"
        );
    }


    /**
     * Processing pipeline, in three steps:
     * <ol>
     *   <li>filter page logs down to keyword searches (item present, came from the
     *       search page, item_type = 'keyword');</li>
     *   <li>explode each search phrase into keywords via the IK-analyzer UDTF
     *       (lateral table join);</li>
     *   <li>count keywords per 1-minute tumbling event-time window.</li>
     * </ol>
     *
     * @return table with columns: window_start_time, window_end_time, keyword,
     *         keyword_count, ts (processing-time epoch millis)
     */
    private static Table handle(TableEnvironment tabEnv) {
        // s1 - extract the search phrase and event time.
        Table searchLogTable = tabEnv.sqlQuery(
                "SELECT\n" +
                        "    page['item'] as full_word,\n" +
                        "    row_time\n" +
                        "FROM dwd_traffic_page_log_kafka_source\n" +
                        "where page['item'] is not null\n" +
                        "and page['last_page_id'] = 'search'\n" +
                        "and page['item_type'] = 'keyword'"
        );

        tabEnv.createTemporaryView("search_log_table", searchLogTable);

        // s2 - tokenize the search phrase with the custom UDTF (one row per keyword).
        tabEnv.createTemporarySystemFunction("ik_analyzer_udtf", IkAnalyzerFunction.class);
        Table wordLogTable = tabEnv.sqlQuery(
           "SELECT\n" +
                   "    full_word,\n" +
                   "    keyword,\n" +
                   "    row_time\n" +
                   "FROM search_log_table,\n" +
                   "lateral table(ik_analyzer_udtf(full_word)) as T(keyword)"
        );
        tabEnv.createTemporaryView("word_log_table", wordLogTable);

        // s3 - tumbling 1-minute window aggregation: count each keyword per window.
        Table reportTable = tabEnv.sqlQuery(
                "SELECT\n" +
                        "    window_start as window_start_time,\n" +
                        "    window_end as window_end_time,\n" +
                        "    keyword,\n" +
                        "    count(keyword) as keyword_count,\n" +
                        "    unix_timestamp() * 1000 as ts\n" +
                        "FROM table (\n" +
                        "        tumble(table word_log_table, descriptor(row_time), interval '1'  minutes)\n" +
                        "    ) t1\n" +
                        "GROUP BY window_start, window_end, keyword"
        );
        return reportTable;
    }


    /**
     * Output table DDL: creates a Flink SQL table mapped to the Doris table
     * {@code jtp_realtime_report.dws_traffic_search_keyword_window_report}.
     */
    private static void createOutputTable(TableEnvironment tabEnv) {
        tabEnv.executeSql(
            "CREATE TABLE dws_traffic_search_keyword_window_report_doris_sink (\n" +
                    "    `window_start_time` STRING COMMENT '窗口开始日期时间',\n" +
                    "    `window_end_time` STRING COMMENT '窗口结束日期时间',\n" +
                    "    `cur_date`        STRING COMMENT '分区日期',\n" +
                    "    `keyword` STRING COMMENT '搜索关键词',\n" +
                    "    `keyword_count` BIGINT COMMENT '搜索关键词被搜索次数'\n" +
                    ") WITH (\n" +
                    "    'connector' = 'doris',\n" +
                    "    'fenodes' = 'node102:8030',\n" +
                    "    'table.identifier' = 'jtp_realtime_report.dws_traffic_search_keyword_window_report',\n" +
                    "    'username' = 'root',\n" +
                    "    'password' = '123456',\n" +
                    // NOTE(review): the Doris connector requires a label prefix that is
                    // unique across job (re)submissions for stream-load 2PC — confirm a
                    // static 'doris-label' does not collide on restart.
                    "    'sink.label-prefix' = 'doris-label'\n" +
                    ")"
        );
    }


    /**
     * Persists the aggregated report into the Doris sink via INSERT INTO.
     * Window timestamps are cast to STRING and trimmed to second precision
     * ('yyyy-MM-dd HH:mm:ss', 19 chars); cur_date keeps the date part (10 chars).
     *
     * Fix: SQL-standard SUBSTRING positions are 1-based; the previous start index 0
     * returned only start+len-1 characters (18 and 9 respectively), truncating the
     * seconds digit and producing a corrupt partition date like '2025-08-1'.
     */
    private static void saveToSink(TableEnvironment tabEnv, Table reportTable) {
        // a. Register the result Table under a queryable name.
        tabEnv.createTemporaryView("report_table", reportTable);
        // b. Query and insert into the Doris-mapped sink table.
        tabEnv.executeSql(
            "INSERT INTO dws_traffic_search_keyword_window_report_doris_sink\n" +
                    "SELECT\n" +
                    "    substring(cast(window_start_time as string), 1, 19) as start_time,\n" +
                    "    substring(cast(window_end_time as string), 1, 19) as end_time,\n" +
                    "    substring(cast(window_start_time as string), 1, 10) as cur_date,\n" +
                    "    keyword,\n" +
                    "    keyword_count\n" +
                    "FROM report_table"
        );
    }
}
