package com.mai.realtime.app.dws;

import com.mai.realtime.app.BaseSqlApp;
import com.mai.realtime.common.Constant;
import com.mai.realtime.function.IkAnalyzer;
import com.mai.realtime.util.SQLUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * com.mai.realtime.app.dws.DwsTrafficKeywordPageViewWindow
 * Search-keyword statistics: tokenizes search keywords from page-view logs
 * and counts occurrences per tumbling event-time window.
 */
public class DwsTrafficKeywordPageViewWindow extends BaseSqlApp {

    public static void main(String[] args) {
        // Boot the job via the shared SQL-app skeleton:
        // web UI / metrics port 4002, parallelism 2, checkpoint/app name below.
        new DwsTrafficKeywordPageViewWindow().init(
                4002,
                2,
                "DwsTrafficKeywordPageViewWindow"
        );
    }

    @Override
    protected void handle(StreamExecutionEnvironment env, StreamTableEnvironment tEnv) {

        // Step 1: declare a dynamic table over the DWD page-log Kafka topic.
        // "et" is an event-time attribute derived from the epoch-millis field "ts",
        // with a 3-second out-of-orderness watermark.
        String pageSourceDdl = "create table page(" +
                " page map<string,string>, " +
                " ts bigint," +
                " et as to_timestamp_ltz(ts,3)," +
                " watermark for et as et -interval '3' second " +
                ")"
                + SQLUtil.getKafkaSource(Constant.TOPIC_DWD_TRAFFIC_PAGE, "Edu_02_DwsTrafficKeywordPageViewWindow");
        tEnv.executeSql(pageSourceDdl);

        // Step 2: keep only search events that actually carry a keyword.
        Table searchKeywords = tEnv.sqlQuery("select " +
                " page['item'] keyword," +
                " et" +
                " from page" +
                " where  page['item_type']='keyword' " +
                " and page['item'] is not null");
        tEnv.createTemporaryView("keyword_table", searchKeywords);

        // Step 3: split each search phrase into individual words using a
        // user-defined table function (IK analyzer), cross-joined laterally
        // so every emitted word keeps its original event time.
        tEnv.createTemporaryFunction("ik_analyzer", IkAnalyzer.class);

        Table tokenizedWords = tEnv.sqlQuery("select " +
                " kw," +
                " et" +
                " from keyword_table " +
                " join lateral table(ik_analyzer(keyword)) on true");
        tEnv.createTemporaryView("kw_table", tokenizedWords);

        // Step 4: 5-second tumbling windows over event time, counting
        // occurrences of each word per window.
        Table windowedCounts = tEnv.sqlQuery("select " +
                " date_format(window_start, 'yyyy-MM-dd HH:mm:ss') stt, " +
                " date_format(window_end, 'yyyy-MM-dd HH:mm:ss') edt, " +
                " 'search' source, " +
                " kw keyword, " +
                " date_format(window_start, 'yyyy-MM-dd') cur_date, " +
                " count(*) keyword_count " +
                " from table( tumble( table kw_table,descriptor(et), interval '5' second ))" +
                "group by window_start, window_end, kw");

/*      // Step 5 (disabled): Doris sink for the aggregated result.
        tEnv.executeSql("create table kw(" +
                " stt string, " +
                " edt string, " +
                " source string, " +
                " keyword string, " +
                " cur_date string, " +
                " keyword_count bigint " +
                ")with(" +
                "  'connector' = 'doris', " +
                "  'fenodes' = 'hadoop102:7030', " +
                "  'table.identifier' = 'edu.dws_traffic_source_keyword_page_view_window', " +
                "  'username' = 'root', " +
                "  'password' = '123321' " +
                ")");*/

        // Doris sink above is currently disabled; print to stdout for debugging.
        windowedCounts.execute().print();

        //windowedCounts.executeInsert("kw");
    }
}
