package net.bwie.realtime.jtp.dws.log.job;

import net.bwie.realtime.jtp.dws.log.bean.IkAnalyzeFunction;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;

/**
 * DWS streaming job: real-time aggregation over the current day's APP traffic
 * (page) logs — for every search event it segments the typed phrase into
 * keywords with the IK analyzer, counts keyword occurrences per 1-minute
 * tumbling event-time window, and writes the windowed report to ClickHouse.
 */
public class JtpTrafficSearchKeywordMinuteWindowDwsJob {

    public static void main(String[] args) {
        // 1. Create the streaming table environment.
        TableEnvironment tableEnv = getTableEnv();
        // 2. Register the Kafka source table (DWD page log).
        createInputTable(tableEnv);
        // 3. Filter search events, split phrases into keywords, aggregate per window.
        Table handle = handle(tableEnv);
        // 4. Register the ClickHouse sink table.
        createOutputTable(tableEnv);
        // 5. Submit the streaming INSERT that writes the report to ClickHouse.
        saveToClickHouse(tableEnv, handle);
    }

    /**
     * Registers the ClickHouse sink table. Its schema mirrors the report
     * produced by {@link #handle(TableEnvironment)}: window bounds, keyword,
     * keyword count, and the wall-clock timestamp of the aggregation.
     */
    private static void createOutputTable(TableEnvironment tableEnv) {
        tableEnv.executeSql(
                "CREATE TABLE IF NOT EXISTS dws_traffic_search_keyword_window_report_clickhouse_sink\n" +
                        "(\n" +
                        " `window_start_time` STRING COMMENT '窗口开始日期时间',\n"+
                        " `window_end_time` STRING COMMENT '窗口结束日期时间',\n"+
                        " `keyword` STRING COMMENT '搜索关键词',\n" +
                        " `keyword_count` BIGINT COMMENT '搜索关键词的个数',\n" +
                        " `ts` BIGINT COMMENT '系统时间'"+
                        ") WITH (\n"  +
                        "'connector' = 'clickhouse',\n" +
                        "'url' = 'jdbc:clickhouse://node103:8123/jtp_log_report',\n" +
                        "'table' = 'dws_traffic_search_keyword_window_report',\n" +
                        "'username' = 'default',\n" +
                        "'password' = '',\n" +
                        "'format' = 'json'\n" +
                        ")"
        );
    }

    /**
     * Registers the report table as a temporary view and submits the streaming
     * INSERT into the ClickHouse sink. Window bounds are formatted as
     * 'yyyy-MM-dd HH:mm:ss' strings to match the sink schema.
     *
     * @param tableEnv table environment holding the source and sink definitions
     * @param handle   windowed keyword-count report from {@link #handle(TableEnvironment)}
     */
    private static void saveToClickHouse(TableEnvironment tableEnv, Table handle) {
        tableEnv.createTemporaryView("report_table", handle);
        // executeSql submits the INSERT job asynchronously; the streaming job
        // keeps running after this call returns.
        tableEnv.executeSql(
                "INSERT INTO dws_traffic_search_keyword_window_report_clickhouse_sink SELECT\n " +
                        "  DATE_FORMAT(window_start_time,'yyyy-MM-dd HH:mm:ss') AS window_start_time\n" +
                        " ,DATE_FORMAT(window_end_time,'yyyy-MM-dd HH:mm:ss') AS window_end_time\n" +
                        " ,keyword\n"+
                        " ,keyword_count\n" +
                        " ,ts\n"+
                        " from report_table");
        // NOTE(review): removed leftover debug call `handle.execute().print()` —
        // it launched a second, duplicate aggregation job alongside the INSERT.
    }

    /**
     * Builds the per-minute keyword-count report:
     * filter search events → segment phrases into keywords → windowed count.
     *
     * @param tableEnv table environment with the Kafka source table registered
     * @return report table: window_start_time, window_end_time, keyword,
     *         keyword_count, ts (system time in epoch millis)
     */
    private static Table handle(TableEnvironment tableEnv) {
        // A search event is a page view whose previous page was 'search' and
        // whose item_type is 'keyword'; page['item'] holds the full typed phrase.
        Table table = tableEnv.sqlQuery(
                "SELECT \n" +
                        " page['item'] AS full_word\n" +
                        " ,row_time\n" +
                        " FROM dwd_traffic_page_log_kafka_source\n" +
                        " WHERE page['item'] IS NOT NULL\n" +
                        " AND page['last_page_id']='search'\n" +
                        " AND page['item_type']='keyword'"
        );
        tableEnv.createTemporaryView("search_log_table", table);
        // Register the IK-analyzer UDTF used to segment a phrase into keywords.
        tableEnv.createTemporarySystemFunction("ik_analyzer_udtf", IkAnalyzeFunction.class);
        // Explode each full search phrase into one row per segmented keyword
        // via a LATERAL TABLE (cross) join with the UDTF.
        Table wordLogTable = tableEnv.sqlQuery(
                "SELECT \n" +
                        " full_word\n" +
                        " ,keyword\n" +
                        " ,row_time\n" +
                        "FROM search_log_table ,\n" +
                        " LATERAL TABLE(ik_analyzer_udtf(full_word)) AS T(keyword)"
        );
        tableEnv.createTemporaryView("word_log_table", wordLogTable);

        // Count keyword occurrences per 1-minute tumbling event-time window.
        // Interval unit normalized to singular MINUTE (standard SQL qualifier).
        Table reportTable = tableEnv.sqlQuery(
                "SELECT\n" +
                        "TUMBLE_START(row_time,INTERVAL '1' MINUTE) AS window_start_time\n" +
                        "  ,TUMBLE_END(row_time,INTERVAL '1' MINUTE) AS window_end_time\n" +
                        "  ,keyword\n" +
                        "  ,count(keyword) AS keyword_count\n" +
                        "  ,UNIX_TIMESTAMP()*1000 AS ts\n" +
                        "FROM word_log_table\n" +
                        " GROUP BY\n" +
                        " TUMBLE(row_time,INTERVAL '1' MINUTE),\n" +
                        " keyword"
        );
        return reportTable;
    }

    /**
     * Registers the Kafka source table over the DWD page-log topic.
     * Event time (row_time) is derived from the epoch-millis `ts` field with a
     * zero-delay watermark — assumes records arrive in order; TODO confirm
     * whether out-of-order data requires a bounded watermark delay.
     */
    private static void createInputTable(TableEnvironment tableEnv) {
        tableEnv.executeSql("CREATE TABLE IF NOT EXISTS dwd_traffic_page_log_kafka_source\n" +
                "(\n" +
                "    `common` MAP<STRING,STRING> COMMENT'公共环境信息',\n" +
                "`page` MAP<STRING,STRING> COMMENT '页面信息',\n" +
                "`ts` BIGINT,\n" +
                "row_time AS TO_TIMESTAMP(FROM_UNIXTIME(ts / 1000, 'yyyy-MM-dd HH:mm:ss.SSS')),\n" +
                "WATERMARK FOR row_time AS row_time - INTERVAL '0' MINUTE\n" +
                ")WITH(\n" +
                "'connector' = 'kafka',\n" +
                "'topic' = 'dwd-traffic-page-log',\n" +
                "'properties.bootstrap.servers' = 'node101:9092,node102:9092,node103:9092',\n" +
                "'properties.group.id' = 'gid_dws_traffic_search_keyword',\n" +
                // earliest-offset replays the topic from the beginning on a
                // fresh start; lenient JSON options skip malformed records.
                "'scan.startup.mode' = 'earliest-offset',\n" +
                "'format' = 'json',\n" +
                "'json.fail-on-missing-field' = 'false',\n" +
                "'json.ignore-parse-errors' = 'true'" +
                ")"
        );
    }

    /**
     * Creates the streaming TableEnvironment and applies job-level config:
     * Asia/Shanghai local time zone, parallelism 1, and a 5-second state TTL.
     * NOTE(review): a 5 s TTL is very short for minute windows — looks like a
     * test setting; confirm before production use. useBlinkPlanner() is
     * deprecated in Flink >= 1.14 but kept for this codebase's Flink version.
     */
    private static TableEnvironment getTableEnv() {
        EnvironmentSettings build = EnvironmentSettings.newInstance()
                .useBlinkPlanner()
                .inStreamingMode()
                .build();
        TableEnvironment tableEnv = TableEnvironment.create(build);
        Configuration configuration = tableEnv.getConfig().getConfiguration();
        configuration.setString("table.local-time-zone", "Asia/Shanghai");
        configuration.setString("table.exec.resource.default-parallelism", "1");
        configuration.setString("table.exec.state.ttl", "5 s");
        return tableEnv;
    }
}
