package net.bwie.realtime.jtp.dws.log.job;

import com.oracle.net.Sdp;

import net.bwie.realtime.jtp.dws.log.util.AnalyzerUtil;
import net.bwie.realtime.jtp.dws.log.util.JiebaUtil;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.table.annotation.DataTypeHint;
import org.apache.flink.table.annotation.FunctionHint;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.functions.TableFunction;
import org.apache.flink.types.Row;
import org.apache.hadoop.yarn.client.api.impl.TimelineConnector;
import org.wltea.analyzer.lucene.IKAnalyzer;

import java.util.List;



/**
 * DWS job: per-minute search-keyword counts.
 *
 * <p>Pipeline: read DWD page-view logs from Kafka, filter rows that represent a
 * keyword search, split the full search phrase into individual keywords with an
 * IK-analyzer table function, count each keyword inside a 1-minute tumbling
 * event-time window, and write the windowed report to ClickHouse.
 */
public class JtpTrafficSearchKeywordMinuteWindowDwsJob {

    /** Job entry-point class; never instantiated. */
    private JtpTrafficSearchKeywordMinuteWindowDwsJob() {
    }

    public static void main(String[] args) {
        // 1. Build the streaming table environment (time zone, parallelism, state TTL).
        TableEnvironment tabEnv = getTableEnv();
        // 2. Register the input table mapped onto the Kafka DWD page-log topic.
        createInputTable(tabEnv);
        // 3. Transform: extract keywords and aggregate them per tumbling window.
        Table reportTable = handle(tabEnv);
        // 4. Register the output table mapped onto the ClickHouse report table.
        createOutputTable(tabEnv);
        // 5. Insert the report rows into ClickHouse.
        saveToClickHouse(tabEnv, reportTable);
    }

    /**
     * Inserts the windowed report into the ClickHouse sink table, formatting the
     * window boundary timestamps as 'yyyy-MM-dd HH:mm:ss' strings.
     *
     * @param tabEnv      table environment the sink table is registered in
     * @param reportTable aggregated result produced by {@link #handle}
     */
    private static void saveToClickHouse(TableEnvironment tabEnv, Table reportTable) {
        // Expose the Table object to SQL under a view name.
        tabEnv.createTemporaryView("report_table", reportTable);
        tabEnv.executeSql(
                "INSERT INTO dws_traffic_search_keyword_window_report_clickhouse_sink\n" +
                        "SELECT\n" +
                        "DATE_FORMAT(window_start_time,'yyyy-MM-dd HH:mm:ss') as window_start_time,\n" +
                        "DATE_FORMAT(window_end_time,'yyyy-MM-dd HH:mm:ss') as window_end_time,\n" +
                        "keyword,\n" +
                        "keyword_count,\n" +
                        "ts\n" +
                        "FROM report_table"
        );
    }

    /**
     * Registers the ClickHouse sink table
     * {@code dws_traffic_search_keyword_window_report_clickhouse_sink}.
     */
    private static void createOutputTable(TableEnvironment tabEnv) {
        tabEnv.executeSql("" +
                "CREATE TABLE dws_traffic_search_keyword_window_report_clickhouse_sink (\n" +
                "      `window_start_time` STRING COMMENT '窗口开始日期时间',\n" +
                "      `window_end_time` STRING COMMENT '窗口结束日期时间',\n" +
                "      `keyword` STRING COMMENT '搜索关键词',\n" +
                "      `keyword_count` BIGINT COMMENT '搜索关键词被搜索次数',\n" +
                "      `ts` BIGINT COMMENT '数据产生时间戳'\n" +
                ") WITH (\n" +
                "    'connector' = 'clickhouse',\n" +
                "    'url' = 'jdbc:clickhouse://node103:8123/jtp_log_report',\n" +
                "    'table' = 'dws_traffic_search_keyword_window_report',\n" +
                "    'username' = 'default',\n" +
                "    'password' = '',\n" +
                "    'format' = 'json'\n" +
                ")");
    }

    /**
     * Core transformation: filter search events, split the search phrase into
     * keywords, and count keywords per 1-minute tumbling event-time window.
     *
     * @param tabEnv table environment the source table is registered in
     * @return report table with columns
     *         (window_start_time, window_end_time, keyword, keyword_count, ts)
     */
    private static Table handle(TableEnvironment tabEnv) {
        // A search event is a page row whose previous page was 'search' and whose
        // item carries the typed keyword phrase.
        Table searchLogTable = tabEnv.sqlQuery(
                "SELECT \n" +
                        "    page['item'] as full_word,\n" +
                        "    row_time\n" +
                        "FROM dwd_traffic_page_log_kafka_source\n" +
                        "where page['item'] is not null \n" +
                        "and page['last_page_id'] = 'search'\n" +
                        "and page['item_type'] = 'keyword'"
        );
        tabEnv.createTemporaryView("search_log_table", searchLogTable);

        // Register the IK tokenizer as a SQL table function.
        tabEnv.createTemporarySystemFunction("ik", IKAnalyzerFunction.class);

        // LATERAL join: one output row per keyword produced from full_word.
        Table wordLogTable = tabEnv.sqlQuery(
                "SELECT\n" +
                        "full_word,\n" +
                        "keyword,\n" +
                        "row_time\n" +
                        "FROM search_log_table,\n" +
                        "lateral table(ik(full_word)) as T(keyword)"
        );
        tabEnv.createTemporaryView("word_log_table", wordLogTable);

        // 1-minute tumbling window count per keyword.
        // NOTE: interval time units must be singular in Flink SQL ('1' MINUTE,
        // not MINUTES) — the plural form does not parse.
        Table reportTable = tabEnv.sqlQuery(
                "SELECT\n" +
                        " tumble_start(row_time, interval '1' minute) as window_start_time,\n" +
                        " tumble_end(row_time, interval '1' minute) as window_end_time,\n" +
                        " keyword,\n" +
                        " count(keyword) as keyword_count,\n" +
                        " unix_timestamp() *1000 as ts\n" +
                        "FROM word_log_table\n" +
                        "GROUP BY\n" +
                        "    tumble(row_time,interval '1' minute),keyword"
        );
        return reportTable;
    }

    /**
     * Registers the Kafka source table over the DWD page-view topic. An event-time
     * column {@code row_time} is derived from the epoch-millis field {@code ts},
     * with a zero-delay watermark.
     */
    private static void createInputTable(TableEnvironment tabEnv) {
        tabEnv.executeSql("" +
                "CREATE TABLE dwd_traffic_page_log_kafka_source\n" +
                "(\n" +
                "    `common` MAP<STRING, STRING> COMMENT '公共环境信息',\n" +
                "    `page`   MAP<STRING, STRING> COMMENT '页面信息',\n" +
                "    `ts`     BIGINT,\n" +
                "    row_time AS TO_TIMESTAMP(FROM_UNIXTIME(ts / 1000, 'yyyy-MM-dd HH:mm:ss.SSS')),\n" +
                "    WATERMARK FOR row_time AS row_time - INTERVAL '0' MINUTE\n" +
                ") WITH (\n" +
                "    'connector' = 'kafka',\n" +
                "    'topic' = 'dwd_log_page_view_log',\n" +
                "    'properties.bootstrap.servers' = 'node101:9092,node102:9092,node103:9092',\n" +
                "    'properties.group.id' = 'gid_dws_traffic_search_keyword',\n" +
                "    'scan.startup.mode' = 'earliest-offset',\n" +
                "    'format' = 'json',\n" +
                "    'json.fail-on-missing-field' = 'false',\n" +
                "    'json.ignore-parse-errors' = 'true'\n" +
                ")"
        );
    }

    /**
     * Builds the streaming {@link TableEnvironment} and applies job-level config:
     * local time zone, default parallelism, and idle-state TTL.
     */
    private static TableEnvironment getTableEnv() {
        // useAnyPlanner() is deprecated (the Blink planner is the only planner in
        // modern Flink) but kept for compatibility with older runtime versions.
        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .inStreamingMode()
                .useAnyPlanner()
                .build();
        TableEnvironment tabEnv = TableEnvironment.create(settings);
        Configuration configuration = tabEnv.getConfig().getConfiguration();
        // Session time zone used by time functions and TIMESTAMP_LTZ rendering.
        configuration.setString("table.local-time-zone", "Asia/Shanghai");
        // Default operator parallelism.
        configuration.setString("table.exec.resource.default-parallelism", "1");
        // Expire idle keyed state after 5 seconds.
        configuration.setString("table.exec.state.ttl", "5 s");
        return tabEnv;
    }

    /**
     * Table function splitting a phrase into keywords with the IK analyzer.
     * Emits one ROW&lt;word STRING&gt; per token.
     */
    @FunctionHint(
            output = @DataTypeHint("ROW<word STRING>")
    )
    public static class IKAnalyzerFunction extends TableFunction<Row> {
        public void eval(String str) throws Exception {
            List<String> list = AnalyzerUtil.ikAnalyzer(str);
            for (String s : list) {
                collect(Row.of(s));
            }
        }
    }

    /**
     * Alternative tokenizer based on jieba; not registered by this job but kept
     * as a drop-in replacement for {@link IKAnalyzerFunction}.
     */
    @FunctionHint(
            output = @DataTypeHint("ROW<word STRING>")
    )
    public static class JiebaAnalyzerFunction extends TableFunction<Row> {
        public void eval(String str) throws Exception {
            List<String> list = JiebaUtil.jiebaAnalyzer(str);
            for (String s : list) {
                collect(Row.of(s));
            }
        }
    }
}
