package org.example.realtime.jtp.dws.log.job;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;
import org.example.realtime.jtp.dws.log.function.IkAnalyzerFunction;

/**
 * 搜索关键词实时统计，其中使用IKAnalyzer分词器进行分词，采用FlinkSQL方式数据处理。
 */
/**
 * Real-time search-keyword statistics job (DWS layer), implemented purely in Flink SQL.
 *
 * <p>Pipeline: read DWD page logs from Kafka, keep only search events, tokenize the
 * searched phrase with the IKAnalyzer UDTF ({@link IkAnalyzerFunction}), count each
 * keyword per 1-minute event-time tumbling window, and write the per-window counts
 * to ClickHouse.
 */
public class JtpTrafficSearchKeywordMinuteWindowDwsJob {

    /** Job entry class with only static methods — not meant to be instantiated. */
    private JtpTrafficSearchKeywordMinuteWindowDwsJob() {
    }

    public static void main(String[] args) {
        // 1. Build the streaming table environment with job-level configuration.
        TableEnvironment tableEnv = getTableEnv();
        // 2. Source table mapped onto the Kafka topic of DWD page logs.
        createInputTable(tableEnv);
        // 3. Transformation: filter search events, tokenize, window-aggregate.
        Table reportTable = handle(tableEnv);
        // 4. Sink table mapped onto the ClickHouse report table.
        createOutputTable(tableEnv);
        // 5. Submit the INSERT job; this call triggers execution.
        saveToClickHouse(tableEnv, reportTable);
    }

    /**
     * Declares the ClickHouse sink table for the per-window keyword report.
     *
     * @param tableEnv table environment the DDL is registered in
     */
    private static void createOutputTable(TableEnvironment tableEnv) {
        tableEnv.executeSql("CREATE TABLE if not exists dws_traffic_search_keyword_window_report_clickhouse_sink (\n" +
                "    `window_start_time` STRING COMMENT '窗口开始日期时间',\n" +
                "    `window_end_time` STRING COMMENT '窗口结束日期时间',\n" +
                "    `keyword` STRING COMMENT '搜索关键词',\n" +
                "    `keyword_count` BIGINT COMMENT '搜索关键词被搜索次数',\n" +
                "    `ts` BIGINT COMMENT '数据产生时间戳'\n" +
                ") WITH (\n" +
                "    'connector' = 'clickhouse',\n" +
                "    'url' = 'jdbc:clickhouse://node103:8123/jtp_log_report',\n" +
                "    'table' = 'dws_traffic_search_keyword_window_report',\n" +
                "    'username' = 'default',\n" +
                "    'password' = '',\n" +
                "    'format' = 'json'\n" +
                ")");
    }

    /**
     * Inserts the aggregated report rows into the ClickHouse sink table.
     *
     * <p>Window bounds are formatted as 'yyyy-MM-dd HH:mm:ss' strings to match the
     * sink's STRING columns.
     *
     * @param tableEnv    table environment holding the sink table
     * @param reportTable aggregated result produced by {@link #handle(TableEnvironment)}
     */
    private static void saveToClickHouse(TableEnvironment tableEnv, Table reportTable) {
        // Register the Table object under a name so it can be referenced from SQL.
        tableEnv.createTemporaryView("report_table", reportTable);
        // executeSql on an INSERT submits the streaming job.
        tableEnv.executeSql(
                "insert into dws_traffic_search_keyword_window_report_clickhouse_sink\n" +
                        "select\n" +
                        "    DATE_FORMAT(window_start_time,'yyyy-MM-dd HH:mm:ss') as window_start_time\n" +
                        ",DATE_FORMAT(window_end_time,'yyyy-MM-dd HH:mm:ss') as window_end_time\n" +
                        ",keyword\n" +
                        ",keyword_count\n" +
                        ",ts\n" +
                        "from report_table"
        );
    }

    /**
     * Core transformation: filter → tokenize → window-aggregate.
     *
     * <p>Step 1 keeps only page events that represent a keyword search (previous page
     * was 'search' and the item type is 'keyword'). Step 2 explodes each full search
     * phrase into individual keywords via the IKAnalyzer UDTF in a lateral join.
     * Step 3 counts occurrences of each keyword per 1-minute tumbling event-time
     * window; `ts` records the wall-clock emission time in epoch milliseconds.
     *
     * @param tableEnv table environment with the Kafka source table registered
     * @return per-window, per-keyword counts
     */
    private static Table handle(TableEnvironment tableEnv) {
        // Step 1: extract the raw search phrase and the event-time attribute.
        Table searchLogTable = tableEnv.sqlQuery(
                "select\n" +
                "    page['item'] as full_word\n" +
                "    ,row_time\n" +
                "from dwd_traffic_page_log_kafka_source\n" +
                "where page['item'] is not null\n" +
                "    and page['last_page_id']='search'\n" +
                "    and page['item_type']='keyword'");
        tableEnv.createTemporaryView("search_log_table", searchLogTable);

        // Step 2: register the IKAnalyzer UDTF and split each phrase into keywords.
        tableEnv.createTemporarySystemFunction("ik_analyzer_udtf", IkAnalyzerFunction.class);
        Table wordLogTable = tableEnv.sqlQuery(
                "select\n" +
                        "    full_word\n" +
                        "    ,keyword\n" +
                        "    ,row_time\n" +
                        "from search_log_table,\n" +
                        "    lateral table(ik_analyzer_udtf(full_word)) as T(keyword)");
        tableEnv.createTemporaryView("word_log_table", wordLogTable);

        // Step 3: 1-minute tumbling windows keyed by keyword. TUMBLE assigns each row
        // to exactly one fixed-size, non-overlapping window based on row_time;
        // TUMBLE_START/TUMBLE_END expose the window bounds as columns.
        // (The windowing TVF form — TUMBLE(TABLE ..., DESCRIPTOR(row_time), ...) with
        // GROUP BY window_start, window_end — is the newer equivalent.)
        Table reportTable = tableEnv.sqlQuery(
                "select\n" +
                        "    TUMBLE_START(row_time, INTERVAL '1' minutes) as window_start_time\n" +
                        "    ,TUMBLE_END(row_time, INTERVAL '1' minutes) as window_end_time\n" +
                        "    ,keyword\n" +
                        "    ,count(keyword) as keyword_count\n" +
                        "    ,UNIX_TIMESTAMP()*1000 as ts\n" +
                        "from word_log_table\n" +
                        "group by TUMBLE(row_time, INTERVAL '1' minutes), keyword"
        );
        return reportTable;
    }

    /**
     * Declares the Kafka source table for DWD page logs.
     *
     * <p>`row_time` is a computed event-time column derived from the epoch-millisecond
     * `ts` field (TO_TIMESTAMP_LTZ(ts, 3) would be the timezone-aware alternative);
     * the watermark allows zero lateness.
     *
     * @param tableEnv table environment the DDL is registered in
     */
    private static void createInputTable(TableEnvironment tableEnv) {
        tableEnv.executeSql(
                "CREATE TABLE if not exists dwd_traffic_page_log_kafka_source\n" +
                        "(\n" +
                        "    `common` MAP<STRING, STRING> COMMENT '公共环境信息',\n" +
                        "    `page`   MAP<STRING, STRING> COMMENT '页面信息',\n" +
                        "    `ts`     BIGINT,\n" +
                        "    row_time AS TO_TIMESTAMP(FROM_UNIXTIME(ts / 1000, 'yyyy-MM-dd HH:mm:ss.SSS')),\n" +
                        "    WATERMARK FOR row_time AS row_time - INTERVAL '0' MINUTE\n" +
                        ") WITH (\n" +
                        "    'connector' = 'kafka',\n" +
                        "    'topic' = 'dwd-traffic-page-log',\n" +
                        "    'properties.bootstrap.servers' = 'node101:9092,node102:9092,node103:9092',\n" +
                        "    'properties.group.id' = 'gid_dws_traffic_search_keyword',\n" +
                        "    'scan.startup.mode' = 'earliest-offset',\n" +
                        "    'format' = 'json',\n" +
                        "    'json.fail-on-missing-field' = 'false',\n" +
                        "    'json.ignore-parse-errors' = 'true'\n" +
                        ")"
        );
    }

    /**
     * Builds a streaming {@link TableEnvironment} with job-level configuration:
     * session time zone, default parallelism of 1, and a 5-second state TTL.
     *
     * @return configured table environment
     */
    private static TableEnvironment getTableEnv() {
        // NOTE(review): useBlinkPlanner() is deprecated since Flink 1.14 and removed
        // in 1.15 (Blink is the only planner); kept here for compatibility with the
        // Flink version this project builds against — drop it on upgrade.
        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .inStreamingMode()
                .useBlinkPlanner()
                .build();

        TableEnvironment tableEnv = TableEnvironment.create(settings);

        Configuration configuration = tableEnv.getConfig().getConfiguration();
        configuration.setString("table.local-time-zone", "Asia/Shanghai");
        configuration.setString("table.exec.resource.default-parallelism", "1");
        configuration.setString("table.exec.state.ttl", "5 s");

        return tableEnv;
    }
}
