package app.dws;

import Bean.KeywordBean;
import app.dwd.BaseSQLApp;
import common.Constant;
import function.KWSplit;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import util.FlinkSinkUtil;
import util.SQLUtil;

/**
 * DWS job: traffic source keyword page-view window.
 *
 * <p>Reads DWD page-view logs from Kafka, filters keyword-search records,
 * splits each search phrase into individual keywords via the {@code kw_split}
 * UDTF, counts keyword occurrences per 5-second tumbling event-time window,
 * and writes the aggregated rows to the ClickHouse table
 * {@code dws_traffic_source_keyword_page_view_window}.
 */
public class Dws_DwsTrafficSourceKeywordPageViewWindow extends BaseSQLApp {

    // Used both as the job name and as the Kafka consumer-group id; keep the
    // two in sync by defining it once.
    private static final String APP_NAME = "Dws_DwsTrafficSourceKeywordPageViewWindow";

    public static void main(String[] args) {
        new Dws_DwsTrafficSourceKeywordPageViewWindow().init(
                4011,      // REST/monitoring port for this job
                2,         // parallelism
                APP_NAME
        );
    }

    /**
     * Builds the full Flink SQL pipeline: Kafka source table -> keyword
     * extraction -> word split -> windowed count -> ClickHouse sink.
     *
     * @param env  the streaming execution environment (used to launch the job)
     * @param tEnv the table environment used to register tables, views and UDFs
     */
    @Override
    protected void handle(StreamExecutionEnvironment env, StreamTableEnvironment tEnv) {
        // 1. Declare a dynamic table over the DWD page log topic.
        //    `et` is the event time derived from `ts` (epoch millis), with a
        //    3-second out-of-orderness allowance in the watermark.
        tEnv.executeSql("create table dwd_traffic_page(" +
                "page map<String,String>, " +
                "ts bigint, " +
                "et as to_timestamp_ltz(ts,3), " +
                "watermark for et as et - interval '3' second " +
                ")" + SQLUtil.getKafkaSource(Constant.TOPIC_DWD_TRAFFIC_PAGE, APP_NAME));

        // 2. Keep only keyword-search records and project out the search phrase.
        Table kwTable = tEnv.sqlQuery("select " +
                " page['item'] kw, " +
                " et " +
                " from dwd_traffic_page " +
                " where " +
                " page['item_type']='keyword' " +
                " and page['item'] is not null ");
        tEnv.createTemporaryView("kw_table", kwTable);

        // 3. Split each search phrase into individual keywords with the
        //    kw_split table function (one output row per keyword).
        tEnv.createTemporaryFunction("kw_split", KWSplit.class);
        Table keywordTable = tEnv.sqlQuery("select " +
                "keyword, " +
                "et " +
                "from kw_table " +
                "join lateral table(kw_split(kw)) on true");
        tEnv.createTemporaryView("keyword_table", keywordTable);

        // 4. Count each keyword per 5-second tumbling event-time window.
        //    `ts` records the (processing) time the row was emitted.
        Table result = tEnv.sqlQuery("select " +
                "date_format(window_start,'yyyy-MM-dd HH:mm:ss') stt, " +
                "date_format(window_end,'yyyy-MM-dd HH:mm:ss') edt, " +
                "'无' source, " +
                "keyword, " +
                "count(keyword) keyword_count, " +
                "unix_timestamp()*1000 ts " +
                "from table(tumble(table keyword_table,descriptor(et),interval '5' second)) " +
                "group by keyword,window_start,window_end");

        // 5. Convert the result table to a retract stream, keep only the
        //    insert/accumulate messages (f0 == true), and sink to ClickHouse.
        tEnv
                .toRetractStream(result, KeywordBean.class)
                .filter(t -> t.f0)
                .map(t -> t.f1)
                .addSink(FlinkSinkUtil.getClickHouseSink("dws_traffic_source_keyword_page_view_window", KeywordBean.class));

        try {
            env.execute();
        } catch (Exception e) {
            // Surface any launch/runtime failure; the cause is preserved.
            throw new RuntimeException(e);
        }
    }
}
