package com.bw.gmall.reaktime.dws.app;

import com.bw.gmall.reaktime.dws.function.KwSplit;
import com.bw.gmall.realtime.common.base.BaseSQLApp;
import com.bw.gmall.realtime.common.constant.Constant;
import com.bw.gmall.realtime.common.util.SQLUtil;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWS job: counts search keywords per 5-second tumbling event-time window and
 * writes the aggregates to Doris.
 *
 * <p>Pipeline: Kafka page log → filter search events → split the search phrase
 * into keywords via the {@link KwSplit} UDTF → tumbling-window count →
 * Doris sink table {@code Constant.DWS_TRAFFIC_SOURCE_KEYWORD_PAGE_VIEW_WINDOW}.
 */
public class DwsTrafficSourceKeywordPageViewWindow extends BaseSQLApp {
    public static void main(String[] args) {
        // port 10051, parallelism 4, ckpt/group id = job name
        new DwsTrafficSourceKeywordPageViewWindow().start(
                10051,
                4,
                "dws_traffic_source_keyword_page_view_window"
        );
    }

    /**
     * Builds and submits the Flink SQL pipeline.
     *
     * @param env  stream execution environment (configured by {@code BaseSQLApp.start})
     * @param tEnv table environment used to register sources, views, UDTF and sink
     */
    @Override
    public void handle(StreamExecutionEnvironment env,
                       StreamTableEnvironment tEnv) {
        // 1. Read the page log from Kafka. `et` is an event-time column derived
        //    from the epoch-millis `ts` field, with a 5-second watermark delay.
        tEnv.executeSql("create table page_log(" +
                " page map<string, string>, " +
                " ts bigint, " +
                " et as to_timestamp_ltz(ts, 3), " +
                " watermark for et as et - interval '5' second " +
                ")" + SQLUtil.getKafkaDDLSource("dws_traffic_source_keyword_page_view_window", Constant.TOPIC_DWD_TRAFFIC_PAGE));

        // 2. Keep only search events: the user came from the 'search' or 'home'
        //    page, the item type is 'keyword', and a non-null phrase is present.
        Table kwTable = tEnv.sqlQuery("select " +
                "page['item'] kw, " +
                "et " +
                "from page_log " +
                "where ( page['last_page_id'] ='search' " +
                "        or page['last_page_id'] ='home' " +
                "       )" +
                "and page['item_type']='keyword' " +
                "and page['item'] is not null ");
        tEnv.createTemporaryView("kw_table", kwTable);

        // 3. Register the keyword-splitting table function and explode each
        //    search phrase into individual keywords (one row per keyword).
        tEnv.createTemporaryFunction("kw_split", KwSplit.class);

        Table keywordTable = tEnv.sqlQuery("select " +
                " keyword, " +
                " et " +
                "from kw_table " +
                "join lateral table(kw_split(kw)) on true ");
        tEnv.createTemporaryView("keyword_table", keywordTable);

        // 4. Windowed aggregation via the TUMBLE windowing TVF: count each
        //    keyword's occurrences per 5-second event-time window.
        Table result = tEnv.sqlQuery("select " +
                " date_format(window_start, 'yyyy-MM-dd HH:mm:ss') stt, " +
                " date_format(window_end, 'yyyy-MM-dd HH:mm:ss') edt, " +
                " date_format(window_start, 'yyyyMMdd') cur_date, " +
                " keyword," +
                " count(*) keyword_count " +
                "from table( tumble(table keyword_table, descriptor(et), interval '5' second ) ) " +
                "group by window_start, window_end, keyword ");

        // 5. Declare the Doris sink. Use the shared constant for the table name
        //    so the CREATE TABLE always matches the insertInto() target below
        //    (previously the name was hard-coded and could drift from the constant).
        tEnv.executeSql("CREATE TABLE " + Constant.DWS_TRAFFIC_SOURCE_KEYWORD_PAGE_VIEW_WINDOW + " (\n" +
                " stt  STRING,\n" +
                " edt STRING,\n" +
                " cur_date STRING,\n" +
                " keyword  STRING,\n" +
                " keyword_count BIGINT\n" +
                ")" + SQLUtil.getDorisSinkSQL(Constant.DWS_TRAFFIC_SOURCE_KEYWORD_PAGE_VIEW_WINDOW));

        // 6. Submit the insert; execute() triggers the streaming job.
        result.insertInto(Constant.DWS_TRAFFIC_SOURCE_KEYWORD_PAGE_VIEW_WINDOW).execute();
    }

}

