package com.atguigu.PracticalProject.app.dws;

import com.atguigu.PracticalProject.app.BaseSqlApp;
import com.atguigu.PracticalProject.common.Constant;
import com.atguigu.PracticalProject.function.IkAnalyzer;
import com.atguigu.PracticalProject.util.SQLUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWS job: counts search keywords (after tokenization) from DWD page-view logs
 * in 5-second tumbling event-time windows and writes the results to Doris.
 *
 * Pipeline: Kafka (DWD page topic) -> filter keyword searches -> IK tokenizer UDTF
 * -> tumbling-window count -> Doris table dws_traffic_source_keyword_page_view_window.
 */
public class DwsTrafficSourceKeywordPageViewWindow extends BaseSqlApp {

    /**
     * Single source of truth for the job name, reused as the Kafka consumer-group id.
     * Previously duplicated as two identical string literals that could drift apart.
     */
    private static final String APP_NAME = "DwsTrafficSourceKeywordPageViewWindow";

    public static void main(String[] args) {
        // NOTE(review): 3022 / 2 are presumably the REST port and parallelism expected
        // by BaseSqlApp.init — confirm against the base class and consider naming them.
        new DwsTrafficSourceKeywordPageViewWindow().init(
                3022,
                2,
                APP_NAME
        );
    }

    @Override
    protected void handle(StreamExecutionEnvironment env, StreamTableEnvironment tEnv) {
        // 1. Create a dynamic table bound to the DWD page-log Kafka topic.
        //    et is the event time derived from ts (epoch millis) with a 3s watermark delay.
        tEnv.executeSql("create table page(" +
                " page map<string, string>, " +
                " ts bigint, " +
                " et as to_timestamp_ltz(ts, 3), " +
                " watermark for et as et - interval '3' second " +
                ")"
                + SQLUtil.getKafkaSource(Constant.TOPIC_DWD_TRAFFIC_PAGE, APP_NAME));

        // 2. Keep only search-keyword records: item_type = 'keyword' with a non-null item.
        Table keywordTable = tEnv.sqlQuery("select " +
                "page['item'] keyword, " +
                "et " +
                "from page " +
                "where " +
                "page['item_type'] = 'keyword' " +
                "and page['item'] is not null");
        tEnv.createTemporaryView("keyword_table", keywordTable);

        // 3. Tokenize each keyword with the IK analyzer, registered as a user-defined
        //    table function (one input row fans out to one row per token `kw`).
        tEnv.createTemporaryFunction("ik_analyzer", IkAnalyzer.class);
        Table kwTable = tEnv.sqlQuery("select" +
                " kw, " +
                " et " +
                "from keyword_table " +
                "join lateral table(ik_analyzer(keyword)) on true");
        tEnv.createTemporaryView("kw_table", kwTable);

        // 4. Count each token per 5-second tumbling event-time window (windowing TVF).
        //    date_format(window_start, ...) in the select list is legal because it is a
        //    deterministic function of the group key window_start.
        Table result = tEnv.sqlQuery("select " +
                " date_format(window_start, 'yyyy-MM-dd HH:mm:ss') stt, " +
                " date_format(window_end, 'yyyy-MM-dd HH:mm:ss') edt, " +
                " kw keyword,  " +
                " date_format(window_start, 'yyyy-MM-dd') cur_date, " +  // statistics (partition) date
                " count(*) keyword_count " +
                "from table( tumble( table kw_table, descriptor(et), interval '5' second ) ) " +
                "group by window_start, window_end, kw");

        // 5. Sink table backed by Doris. Column order matches the select list above.
        // NOTE(review): credentials are hardcoded — move to configuration/secrets
        // management rather than committing them to source control.
        tEnv.executeSql("create table kw(" +
                " stt string, " +
                " edt string, " +
                " keyword string, " +
                " cur_date string, " +
                " keyword_count bigint " +
                ")with(" +
                "  'connector' = 'doris', " +
                "  'fenodes' = 'hadoop102:7030', " +
                "  'table.identifier' = 'edu.dws_traffic_source_keyword_page_view_window', " +
                "  'username' = 'root', " +
                "  'password' = '123321' " +
                ")");

        result.executeInsert("kw");
    }
}
