package com.millstein.realtime.app.dws;

import com.millstein.realtime.app.base.BaseSqlApp;
import com.millstein.realtime.common.Constants;
import com.millstein.realtime.function.IkAnalyzer;
import com.millstein.realtime.util.SqlUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Description DWS job: counts page views per search keyword in 5-second
 *              tumbling windows and writes the aggregates to Doris.
 * @Author tsing
 * @Date 2024-10-17 15:38
 */
public class Dws_01_TrafficSourceKeywordPageViewWindow extends BaseSqlApp {

    /** Job identifier, reused as the Flink job name and the Kafka consumer group id. */
    private static final String APP_NAME = "Dws_01_TrafficSourceKeywordPageViewWindow";

    public static void main(String[] args) {
        new Dws_01_TrafficSourceKeywordPageViewWindow().init(
                8001,     // local Web UI / REST port
                3,        // default parallelism
                APP_NAME
        );
    }

    /**
     * Builds the keyword page-view aggregation pipeline:
     * Kafka (dwd_traffic_page) -> filter search events -> IK word segmentation
     * -> 5s tumbling-window count -> Doris sink.
     *
     * @param env      stream execution environment (unused directly; SQL-only job)
     * @param tableEnv table environment the pipeline is registered on
     */
    @Override
    public void handle(StreamExecutionEnvironment env, StreamTableEnvironment tableEnv) {
        // 1. Source table over the dwd_traffic_page Kafka topic.
        //    `et` is an event-time attribute derived from the epoch-millis `ts`,
        //    with a 5-second out-of-orderness watermark.
        tableEnv.executeSql(
                "create table dwd_traffic_page ( " +
                "    page map<string, string>, " +
                "    ts bigint, " +
                "    et as to_timestamp_ltz(ts, 3), " +
                "    watermark for et as et - interval '5' second " +
                ")" + SqlUtil.getKafkaSourceDDL(
                        Constants.TOPIC_DWD_TRAFFIC_PAGE,
                        APP_NAME
                )
        );

        // 2. Keep only search events: previous page was the search page,
        //    the item is a keyword, and the keyword is present.
        Table filterTable = tableEnv.sqlQuery(
                "select " +
                "    `page`['item'] keyword, " +
                "    `ts`, " +
                "    `et` " +
                "from dwd_traffic_page " +
                "where `page`['last_page_id'] = 'search' " +
                "    and `page`['item_type'] = 'keyword' " +
                "    and `page`['item'] is not null"
        );
        tableEnv.createTemporaryView("filter_table", filterTable);

        // 3. Register the custom IK-analyzer table function for word segmentation.
        tableEnv.createTemporaryFunction("ik_analyzer", IkAnalyzer.class);

        // 4. Split each search phrase into individual tokens; the function emits
        //    one row per token in column `rs`.
        Table ikTable = tableEnv.sqlQuery(
                "select " +
                "    rs, " +
                "    ts, " +
                "    et " +
                "from filter_table " +
                "join lateral table(ik_analyzer(keyword)) on true"
        );
        tableEnv.createTemporaryView("ik_table", ikTable);

        // 5. Count occurrences per token in 5-second tumbling windows (window TVF).
        Table resultTable = tableEnv.sqlQuery(
                "select " +
                "    date_format(window_start, 'yyyy-MM-dd HH:mm:ss') stt, " +
                "    date_format(window_end, 'yyyy-MM-dd HH:mm:ss') edt, " +
                "    'search' source, " +
                "    rs keyword, " +
                "    date_format(window_start, 'yyyy-MM-dd') cur_date, " +
                "    count(*) keyword_count " +
                "from table(tumble(table ik_table, descriptor (et), interval '5' second)) " +
                "group by window_start, window_end, rs"
        );

        // 6. Doris sink table for the windowed aggregates.
        //    NOTE(review): credentials are hard-coded here — move host/user/password
        //    into configuration (e.g. Constants / external config) before production use.
        tableEnv.executeSql(
                "create table dws_traffic_source_keyword_page_view_window ( " +
                "    stt string, " +
                "    edt string, " +
                "    source string, " +
                "    keyword string, " +
                "    cur_date string, " +
                "    keyword_count bigint " +
                ") with ( " +
                "    'connector' = 'doris', " +
                "    'fenodes' = 'hadoop102:7030', " +
                "    'table.identifier' = 'gmall.dws_traffic_source_keyword_page_view_window', " +
                "    'username' = 'root', " +
                "    'password' = '123456' " +
                ")"
        );

        // 7. Write the aggregated rows to Doris.
        resultTable.executeInsert("dws_traffic_source_keyword_page_view_window");
    }
}
