package net.bwie.realtime.jtp.dws.log.job;

import net.bwie.realtime.jtp.utils.AnalyzerUtil;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.table.annotation.DataTypeHint;
import org.apache.flink.table.annotation.FunctionHint;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.functions.TableFunction;
import org.apache.flink.types.Row;

import java.util.List;

/**
 * DWS-layer Flink SQL job: per-minute search-keyword counts.
 *
 * <p>Pipeline: Kafka page-log topic (DWD) -> filter search events -> split the
 * full search phrase into keywords with an IK-analyzer UDTF -> 1-minute tumbling
 * event-time window COUNT per keyword -> Doris report table.
 */
public class JtpTrafficSearchKeywordMinuteWindowDwsJob {

    public static void main(String[] args) throws Exception {

        // 1. Table execution environment (streaming mode).
        TableEnvironment tabEnv = getTableEnv();
        // 2. Input table mapped onto the Kafka page-log topic.
        createInputTable(tabEnv);
//        tabEnv.executeSql("select * from dws_traffic_page_log_kafka_source").print();
        // 3. Transform: filter search logs, tokenize, window-aggregate.
        Table reportTable = handle(tabEnv);
        // 4. Output table mapped onto Doris.
        createOutputTable(tabEnv);
        // 5. INSERT INTO the Doris sink.
        saveToSinkDoris(tabEnv, reportTable);
    }

    /**
     * Inserts the windowed report into the Doris sink table.
     *
     * <p>Window timestamps are formatted by casting to STRING and trimming with
     * SUBSTRING. Note Flink SQL SUBSTRING positions are 1-based; a start of 0
     * would silently drop the last character of each field ('yyyy-MM-dd HH:mm:s'
     * and 'yyyy-MM-d'), so position 1 is used here.
     *
     * @param tabEnv      the table environment the sink table is registered in
     * @param reportTable the aggregated window/keyword/count table
     */
    private static void saveToSinkDoris(TableEnvironment tabEnv, Table reportTable) {
        tabEnv.createTemporaryView("report_table", reportTable);
        tabEnv.executeSql("insert into dws_traffic_search_keyword_window_report_doris_sink\n" +
                "select\n" +
                // 1-based SUBSTRING: chars 1..19 -> 'yyyy-MM-dd HH:mm:ss'
                "    SUBSTRING(CAST(window_start AS STRING ),1,19) as window_start_time,\n" +
                "    SUBSTRING(CAST(window_end AS STRING ),1,19) as window_end_time,\n" +
                // chars 1..10 -> 'yyyy-MM-dd' partition date
                "    SUBSTRING(CAST(window_start AS STRING ),1,10) as cur_date,\n" +
                "    keyword,\n" +
                "    keyword_count\n" +
                "from\n" +
                "    report_table");
    }

    /**
     * Declares the Doris sink table for the keyword-window report.
     * Connection settings (fenodes, credentials, label prefix) are inlined;
     * the physical Doris table lives in the jtp_realtime_report database.
     */
    private static void createOutputTable(TableEnvironment tabEnv) {
        tabEnv.executeSql("CREATE TABLE dws_traffic_search_keyword_window_report_doris_sink\n" +
                "(\n" +
                "    `window_start_time` STRING COMMENT '窗口开始日期时间',\n" +
                "    `window_end_time`   STRING COMMENT '窗口结束日期时间',\n" +
                "    `cur_date`          STRING COMMENT '分区日期',\n" +
                "    `keyword`           STRING COMMENT '搜索关键词',\n" +
                "    `keyword_count`     BIGINT COMMENT '搜索关键词被搜索次数'\n" +
                ")WITH (\n" +
                "    'connector' = 'doris',\n" +
                "    'fenodes' = 'node102:8030',\n" +
                "    'table.identifier' = 'jtp_realtime_report.dws_traffic_search_keyword_window_report_doris_sink',\n" +
                "    'username' = 'root',\n" +
                "    'password' = '123456',\n" +
                "    'sink.label-prefix' = 'doris_label'\n" +
                ")"
        );
    }

    /**
     * Core transformation, three steps:
     * <ol>
     *   <li>Filter page logs down to keyword searches (item present,
     *       last_page_id = 'search', item_type = 'keyword').</li>
     *   <li>Explode each full search phrase into keywords via the IK-analyzer
     *       UDTF (LATERAL TABLE join).</li>
     *   <li>Count keywords inside 1-minute tumbling event-time windows
     *       (TUMBLE windowing TVF on row_time).</li>
     * </ol>
     *
     * @param tabEnv the table environment with the Kafka source registered
     * @return table with columns window_start, window_end, keyword, keyword_count
     */
    private static Table handle(TableEnvironment tabEnv) {
        // Step 1: keep only keyword-search page views.
        Table searchLogTable = tabEnv.sqlQuery("select\n" +
                "    page['item'] as full_word\n" +
                "    , row_time\n" +
                "from\n" +
                "    dws_traffic_page_log_kafka_source\n" +
                "where page['item'] is not null\n" +
                "and page['last_page_id'] = 'search'\n" +
                "and page['item_type'] = 'keyword'\n"
                );

        tabEnv.createTemporaryView("search_log_table", searchLogTable);

        // Register the tokenizer UDTF so SQL can reference it by name.
        tabEnv.createTemporaryFunction("ik_analyzer_udtf", IkAnalyzerFunction.class);

        // Step 2: one output row per (search phrase, keyword) pair.
        Table wordLogTable = tabEnv.sqlQuery("select\n" +
                "    full_word\n" +
                "    , keyword\n" +
                "    , row_time\n" +
                "from\n" +
                "    search_log_table,\n" +
                "LATERAL TABLE(ik_analyzer_udtf(full_word)) AS T(keyword)"
        );
        tabEnv.createTemporaryView("word_log_table", wordLogTable);

        // Step 3: per-keyword counts over 1-minute tumbling windows.
        Table reportTable = tabEnv.sqlQuery("select\n" +
                "    window_start,\n" +
                "    window_end,\n" +
                "    keyword,\n" +
                "    count(keyword) as keyword_count\n" +
                "from\n" +
                "    TABLE(\n" +
                "        TUMBLE(TABLE word_log_table,DESCRIPTOR(row_time),INTERVAL '1' MINUTE)\n" +
                "    ) t1\n" +
                "group by\n" +
                "    window_start,\n" +
                "    window_end,\n" +
                "    keyword"
        );

        return reportTable;
    }

    /**
     * Declares the Kafka source table over the DWD page-log topic.
     *
     * <p>row_time is derived from the epoch-millisecond ts field and used as the
     * event-time attribute; the watermark has zero delay, so late events are
     * dropped from the windows — confirm that is acceptable for this report.
     */
    private static void createInputTable(TableEnvironment tabEnv) {
        tabEnv.executeSql("create table if not exists dws_traffic_page_log_kafka_source\n" +
                "(\n" +
                "    common Map<string, string>,\n" +
                "    page map<string, string>,\n" +
                "    ts bigint,\n" +
                "    row_time as to_timestamp(from_unixtime(ts / 1000, 'yyyy-MM-dd HH:mm:ss')),\n" +
                "    watermark for row_time as row_time - interval '0' minutes\n" +
                ")\n" +
                "with (\n" +
                "    'connector' = 'kafka',\n" +
                "    'topic' = 'dwd-traffic-page-log',\n" +
                "    'properties.bootstrap.servers' = 'node101:9092,node102:9092,node103:9092',\n" +
                "    'properties.group.id' = 'testGroup',\n" +
                "    'scan.startup.mode' = 'earliest-offset',\n" +
                "    'format' = 'json'\n" +
                ")"
        );
    }

    /**
     * Builds a streaming-mode TableEnvironment with job-level settings:
     * Shanghai local time zone, parallelism 1, 5 s state TTL and a 5 s
     * checkpoint interval.
     *
     * @return the configured table environment
     */
    private static TableEnvironment getTableEnv() {
        // 1. Environment settings: unbounded streaming execution.
        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .inStreamingMode()
                .build();
        TableEnvironment tabEnv = TableEnvironment.create(settings);
        // 2. Runtime configuration.
        Configuration configuration = tabEnv.getConfig().getConfiguration();
        configuration.setString("table.local-time-zone", "Asia/Shanghai");
        configuration.setString("table.exec.resource.default-parallelism", "1");
        // NOTE(review): a 5 s state TTL is very aggressive — windowing TVF state
        // is managed by the window operator, but verify no other stateful
        // operators lose state prematurely.
        configuration.setString("table.exec.state.ttl", "5 s");
        configuration.setString("execution.checkpointing.interval", "5 s");
        return tabEnv;
    }

    /**
     * UDTF that splits a search phrase into individual keywords using the
     * project's IK analyzer; emits one ROW&lt;keyword STRING&gt; per token.
     */
    @FunctionHint(output = @DataTypeHint("ROW<keyword String>"))
    public static class IkAnalyzerFunction extends TableFunction<Row> {
        public void eval(String content) throws Exception {

            List<String> list = AnalyzerUtil.ikAnalyzer(content);

            for (String s : list) {
                collect(Row.of(s));
            }
        }
    }
}
