package app.dws;

import app.BaseSQLApp;
import beans.KeywordBean;
import common.Constant;
import function.KWSplit;
import utils.FlinkSinkUtil;
import utils.SQLUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author lzc
 * @Date 2022/10/14 08:28
 */
public class Dws_01_DwsTrafficSourceKeywordPageViewWindow extends BaseSQLApp {
    public static void main(String[] args) {
        new Dws_01_DwsTrafficSourceKeywordPageViewWindow().init(
                4001,
                2,
                "Dws_01_DwsTrafficSourceKeywordPageViewWindow"
        );
    }


    /**
     * Builds the keyword page-view aggregation pipeline: reads page logs from the
     * DWD Kafka topic, filters keyword searches, splits each search phrase into
     * individual keywords with a UDTF, counts keywords per 5-second tumbling
     * event-time window, and sinks the results to ClickHouse.
     *
     * @param env  streaming execution environment used to launch the job
     * @param tEnv table environment used to register tables, views and UDFs
     */
    @Override
    protected void handle(StreamExecutionEnvironment env, StreamTableEnvironment tEnv) {
        // 1. Register a dynamic table backed by the dwd_traffic_page Kafka topic.
        tEnv.executeSql("create table dwd_traffic_page(" +
                "page map<String, String>, " +
                "ts bigint, "+
                "et as to_timestamp_ltz(ts, 3), " + // event-time attribute; 3 = ts is in milliseconds (0 would mean seconds)
                "watermark for et as et - interval '3' second " + // tolerate up to 3 seconds of out-of-orderness
                ")" + SQLUtil.getKafkaSource(Constant.TOPIC_DWD_TRAFFIC_PAGE, "Dws_01_DwsTrafficSourceKeywordPageViewWindow"));

        // 2. Keep only keyword searches on the course-list page and project the raw search term.
        Table kwTable = tEnv.sqlQuery("select " +
                "page['item'] kw, " +
                "et " +
                "from dwd_traffic_page " +
                "where page['page_id'] = 'course_list' " +
                "and page['item_type'] = 'keyword' " +
                "and page['item'] is not null");

        tEnv.createTemporaryView("kw_table", kwTable);

        // 3. Split each search phrase into individual keywords (tokenizer UDTF, e.g. ik or jieba).
        tEnv.createTemporaryFunction("kw_split", KWSplit.class);
        Table keywordTable = tEnv.sqlQuery("select " +
                "keyword, " +
                "et " +
                "from kw_table " +
                "join lateral table(kw_split(kw)) on true");

        tEnv.createTemporaryView("keyword_table", keywordTable);

        // 4. Count each keyword's occurrences: group by keyword over 5-second tumbling windows.
        Table result = tEnv.sqlQuery("select " +
                "date_format( window_start, 'yyyy-MM-dd HH:mm:ss' ) stt, " +
                "date_format( window_end, 'yyyy-MM-dd HH:mm:ss' ) edt, " +
                "'keyword_a' source, " +
                "keyword, " +
                "count(keyword) keyword_count, " +
                "unix_timestamp() ts " +
                "from table(tumble (table keyword_table, descriptor(et), interval '5' second) ) " +
                "group by keyword, window_start, window_end");

        // 5. Write out to ClickHouse.
        // Convert the table to a retract stream, keep only insert (f0 == true) messages.
        tEnv.toRetractStream(result, KeywordBean.class)
                .filter(t -> t.f0) // drop retraction (delete) messages
                .map(t -> t.f1)
                .addSink(FlinkSinkUtil.getClickHouseSink("dws_traffic_source_keyword_page_view_window", KeywordBean.class));

        try {
            env.execute();
        } catch (Exception e) {
            // Fail fast instead of swallowing the failure: printStackTrace() would let
            // the job exit as if it succeeded. Preserve the original cause on rethrow.
            throw new RuntimeException("Flink job execution failed", e);
        }
    }
}