package com.bwie.realtime.jtp.dwd.log.job;


import com.bwie.realtime.jtp.dwd.log.utils.AnalyzerUtils;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.table.annotation.DataTypeHint;
import org.apache.flink.table.annotation.FunctionHint;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.functions.TableFunction;
import org.apache.flink.types.Row;



import java.util.List;


/**
 * DWS streaming job: counts search keywords per 1-minute tumbling window.
 *
 * <p>Pipeline: Kafka DWD page-log topic → filter search events → split the raw
 * search phrase into keywords with an IK-analyzer UDTF → tumbling-window
 * count per keyword → Doris report table.
 */
public class JtpTrafficSearchKeywordMinuteWindowDwsJob {

    public static void main(String[] args) {
        // Table execution environment (streaming mode).
        TableEnvironment tabEnv = getTableEnv();
        // Source table mapped onto the Kafka DWD page-log topic.
        createInputTable(tabEnv);
        // Transform: filter search events, tokenize, aggregate per window.
        Table reportTable = handle(tabEnv);
        // Sink table mapped onto the Doris report table.
        createOutputTable(tabEnv);
        // Persist the aggregated report (INSERT INTO ... SELECT).
        saveToSink(tabEnv, reportTable);
    }

    /**
     * Inserts the windowed keyword report into the Doris sink table.
     *
     * @param tabEnv      table environment the sink table is registered in
     * @param reportTable windowed aggregation result produced by {@link #handle}
     */
    private static void saveToSink(TableEnvironment tabEnv, Table reportTable) {
        // Register the Table object under a name so SQL can reference it.
        tabEnv.createTemporaryView("report_table", reportTable);
        // SQL SUBSTRING is 1-based. A start position of 0 silently returns one
        // character fewer than requested (18 instead of 19, 9 instead of 10),
        // truncating the timestamp/date strings — so positions start at 1 here.
        tabEnv.executeSql(
        "INSERT INTO dwd_traffic_search_keyword_window_report_doris_sink\n" +
                "SELECT\n" +
                "    SUBSTRING(CAST(window_start_time  AS  STRING),1,19) AS start_time,\n" +
                "    SUBSTRING(CAST(window_end_time  AS  STRING),1,19) AS end_time,\n" +
                "    SUBSTRING(CAST(window_start_time  AS  STRING),1,10) AS cur_date,\n" +
                "    keyword,\n" +
                "    keyword_count\n" +
                "FROM report_table"
        );
    }

    /**
     * Declares the Doris sink table for the keyword report.
     * Columns are matched to the INSERT by position, not by name.
     */
    private static void createOutputTable(TableEnvironment tabEnv) {
        tabEnv.executeSql(
        "CREATE TABLE dwd_traffic_search_keyword_window_report_doris_sink\n" +
                "(\n" +
                "    `window_start_time`  STRING COMMENT 'window start date-time',\n" +
                "    `window_end_time`  STRING COMMENT 'window end date-time',\n" +
                "    `cur_date` STRING COMMENT 'partition date',\n" +
                "    `keyword` STRING COMMENT 'search keyword',\n" +
                "    `keyword_count` BIGINT COMMENT 'number of searches for the keyword'\n" +
                ")WITH (\n" +
                "    'connector'='doris',\n" +
                "    'fenodes'='node102:8030',  \n" +
                "    'table.identifier'='jtp_realtime_report.dws_traffic_search_keyword_window_report',\n" +
                "    'username'='root',\n" +
                "    'password'='123456',\n" +
                // NOTE(review): a constant label prefix can collide on restart if
                // two-phase commit / unique labels are required — verify against
                // the Doris connector configuration in use.
                "    'sink.label-prefix'='doris_label'\n" +
                ")"
        );
    }

    /**
     * Builds the aggregation pipeline and returns the windowed report table.
     *
     * <p>Steps: (1) keep only page events that are keyword searches coming from
     * the search page, (2) explode the full search phrase into individual
     * keywords via the IK-analyzer UDTF, (3) count keywords per 1-minute
     * tumbling event-time window.
     *
     * @param tabEnv table environment where the source table is registered
     * @return table with columns window_start_time, window_end_time, keyword,
     *         keyword_count, ts (processing timestamp in ms)
     */
    private static Table handle(TableEnvironment tabEnv) {
        // (1) Extract the raw search phrase and the event rowtime.
        Table searchLogTable = tabEnv.sqlQuery(
                "SELECT\n" +
                        "    page['item'] AS full_word,\n" +
                        "    row_time\n" +
                        "FROM dwd_traffic_page_log_kafka_source\n" +
                        "WHERE page['item'] IS NOT NULL\n" +
                        "AND page['last_page_id'] = 'search'\n" +
                        "AND page['item_type'] = 'keyword'"
        );
        tabEnv.createTemporaryView("search_log_table", searchLogTable);

        // (2) Register the tokenizer UDTF and explode each phrase into keywords
        //     (one output row per keyword, joined laterally to the source row).
        tabEnv.createTemporarySystemFunction("ik_analyzer_udtf", IkAnalyzerFunction.class);
        Table wordLongTable = tabEnv.sqlQuery(
                "SELECT\n" +
                        "    full_word,\n" +
                        "    keyword,\n" +
                        "    row_time\n" +
                        "FROM search_log_table,\n" +
                        "LATERAL TABLE (ik_analyzer_udtf(full_word)) AS T(keyword)"
        );
        tabEnv.createTemporaryView("word_long_table", wordLongTable);

        // (3) 1-minute tumbling window on event time; count per keyword.
        //     `ts` is a processing-time tag for the emitted row, not inserted
        //     into the sink (the INSERT selects only five columns).
        Table reportTable = tabEnv.sqlQuery(
                "SELECT\n" +
                        "    window_start AS window_start_time,\n" +
                        "    window_end AS window_end_time,\n" +
                        "    keyword,\n" +
                        "    COUNT(keyword) AS keyword_count,\n" +
                        "    UNIX_TIMESTAMP()*1000 AS ts\n" +
                        "FROM TABLE(\n" +
                        "        TUMBLE(TABLE word_long_table, DESCRIPTOR(row_time), INTERVAL '1' MINUTE )\n" +
                        "    )t1\n" +
                        "GROUP BY window_start,window_end,keyword"
        );
        return reportTable;
    }

    /**
     * Declares the Kafka source table over the DWD page-log topic, with an
     * event-time column derived from the epoch-millisecond field {@code ts}
     * and a zero-delay watermark.
     */
    private static void createInputTable(TableEnvironment tableEnv) {
        // NOTE(review): FROM_UNIXTIME takes seconds, so the integer division
        // ts/1000 discards milliseconds and the '.SSS' pattern always renders
        // '000'. TO_TIMESTAMP_LTZ(ts, 3) would keep millisecond precision —
        // confirm whether sub-second event time matters before changing, as it
        // also changes the rowtime column's type.
        tableEnv.executeSql(
            "CREATE TABLE dwd_traffic_page_log_kafka_source\n" +
                "(\n" +
                "    `common` MAP<String,String> COMMENT 'common environment info',\n" +
                "    `page` MAP<String,String> COMMENT 'page info',\n" +
                "    `ts` BIGINT,\n" +
                "    row_time AS TO_TIMESTAMP(FROM_UNIXTIME(ts/1000,'yyyy-MM-dd HH:mm:ss.SSS')),\n" +
                "    WATERMARK FOR row_time AS row_time - INTERVAL '0' MINUTE\n" +
                ")WITH (\n" +
                "    'connector'='kafka',\n" +
                "    'topic'='dwd-traffic-page-log',\n" +
                "    'properties.bootstrap.servers'='node101:9092,node102:9092,node103:9092',\n" +
                "    'properties.group.id'='gid_dws_traffic_search_keyword',\n" +
                "    'scan.startup.mode'='earliest-offset',\n" +
                "    'format'='json',\n" +
                "    'json.fail-on-missing-field'='false',\n" +
                "    'json.ignore-parse-errors'='true'\n" +
                ")\n"
        );
    }

    /**
     * Creates and configures the streaming {@link TableEnvironment}:
     * Shanghai local time zone, parallelism 1, 5-second state TTL and
     * 30-second checkpoint interval.
     */
    private static TableEnvironment getTableEnv() {
        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .inStreamingMode()
                .build();
        TableEnvironment tabEnv = TableEnvironment.create(settings);

        Configuration configuration = tabEnv.getConfig().getConfiguration();
        configuration.setString("table.local-time-zone", "Asia/Shanghai");
        configuration.setString("table.exec.resource.default-parallelism", "1");
        // Fixed typo: the key was "table.exec.state.tt1" (digit one), which
        // Flink silently ignores — state would never expire. The real option
        // is "table.exec.state.ttl".
        configuration.setString("table.exec.state.ttl", "5 s");
        configuration.setString("execution.checkpointing.interval", "30 s");
        return tabEnv;
    }

    /**
     * UDTF that splits a full search phrase into individual keywords using the
     * IK Chinese analyzer; emits one row per keyword.
     */
    @FunctionHint(output = @DataTypeHint("Row<keyword STRING>"))
    public static class IkAnalyzerFunction extends TableFunction<Row> {
        public void eval(String fullWord) throws Exception {
            // Tokenize with the IK Chinese word segmenter.
            List<String> list = AnalyzerUtils.ikAnalyzer(fullWord);
            for (String keyword : list) {
                collect(Row.of(keyword));
            }
        }
    }
}






















