package com.atguigu.realtime.app.dws;

import com.atguigu.realtime.app.BaseSqlApp;
import com.atguigu.realtime.bean.KeywordStats;
import com.atguigu.realtime.common.Constant;
import com.atguigu.realtime.function.KeyWordUdtf;
import com.atguigu.realtime.util.FlinkSinkUtil;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWS keyword-statistics application: computes keyword "heat" (the number of
 * searches per word) over 10-second tumbling event-time windows and writes
 * the aggregates into the ClickHouse table {@code keyword_stats_2021}.
 *
 * <p>Pipeline: Kafka DWD page log &rarr; filter rows where the user searched
 * something &rarr; split the search phrase into words with the custom
 * IK-analyzer UDTF &rarr; windowed count per (word, source) &rarr; ClickHouse.
 *
 * @author chenlongStart
 * @create 2021-07-05 9:53
 */
public class DwsKeywordStatsApp extends BaseSqlApp {
    public static void main(String[] args) {
        // port 4004, parallelism 1, job / consumer-group name "DwsKeywordStatsApp"
        new DwsKeywordStatsApp().init(4004, 1, "DwsKeywordStatsApp");
    }

    @Override
    protected void run(StreamTableEnvironment tEnv) {
        // Step 1: dynamic source table over the Kafka page-log topic.
        createPageLogSource(tEnv);
        // Step 2: keep only the rows carrying a search keyword.
        registerSearchKeywords(tEnv);
        // Step 3: split each search phrase into individual words.
        registerSplitWords(tEnv);
        // Step 4: windowed count per word, then sink to ClickHouse.
        writeKeywordStats(tEnv, aggregateByWindow(tEnv));
    }

    /**
     * Declares the dynamic table {@code page_log} over the DWD page-log Kafka
     * topic. An event-time attribute {@code et} is derived from the epoch-millis
     * field {@code ts}, with a 3-second out-of-orderness watermark.
     */
    private void createPageLogSource(StreamTableEnvironment tEnv) {
        tEnv
                .executeSql(
                        "create table page_log( " +
                                " common map<string,string>, " +
                                " page map<string,string>, " +
                                " ts bigint, " +
                                " et as to_timestamp(from_unixtime(ts/1000))," +
                                " watermark for et as et - interval '3' second " +
                                ")with(" +
                                " 'connector' = 'kafka'," +
                                " 'properties.bootstrap.servers' = 'hadoop162:9092,hadoop163:9092,hadoop164:9092'," +
                                " 'properties.group.id' = 'DwsKeywordStatsApp'," +
                                " 'topic' = '" + Constant.TOPIC_DWD_PAGE_LOG + "'," +
                                " 'scan.startup.mode' = 'latest-offset'," +
                                " 'format' = 'json'" +
                                ")"
                );
    }

    /**
     * Registers view {@code t1}: the raw search keyword ({@code page['item']}),
     * the page the user came from, and the event time — restricted to visits of
     * the {@code good_list} page that actually carry a search item.
     */
    private void registerSearchKeywords(StreamTableEnvironment tEnv) {
        Table t1 = tEnv.sqlQuery("select" +
                " page['item'] kw," +
                " page['last_page_id'] source," +
                " et " +
                "from page_log " +
                "where page['item'] is not null " +
                "and page['page_id']='good_list'");
        tEnv.createTemporaryView("t1", t1);
    }

    /**
     * Registers view {@code t2}: one row per word obtained by splitting each
     * search phrase with the IK-analyzer table function (cross join lateral).
     */
    private void registerSplitWords(StreamTableEnvironment tEnv) {
        // Register the custom word-splitting UDTF once, then expand every
        // keyword row into one row per produced word.
        tEnv.createTemporaryFunction("ik_analyzer", KeyWordUdtf.class);
        Table t2 = tEnv.sqlQuery(
                "select " +
                        " kw, " +
                        " source, " +
                        " et, " +
                        " word" +
                        " from t1 " +
                        " join lateral table(ik_analyzer(kw)) on true"
        );
        tEnv.createTemporaryView("t2", t2);
    }

    /**
     * Counts occurrences of every (word, source) pair inside 10-second tumbling
     * event-time windows. Column aliases (stt, edt, keyword, source, ct, ts)
     * presumably map onto the {@link KeywordStats} bean fields — confirm against
     * that class when changing this query.
     */
    private Table aggregateByWindow(StreamTableEnvironment tEnv) {
        return tEnv.sqlQuery(
                "select " +
                        " date_format(tumble_start(et,interval '10' second),'yyyy-MM-dd HH:mm:ss') stt, " +
                        " date_format(tumble_end(et,interval '10' second),'yyyy-MM-dd HH:mm:ss') edt, " +
                        " word keyword," +
                        " source," +
                        " count(*) ct," +
                        " unix_timestamp()*1000 ts" +
                        " from t2" +
                        " group by word,source,tumble(et,interval '10' second)"
        );
    }

    /**
     * Converts the aggregate table to a retract stream, keeps only the insert
     * (add) messages, and sinks them into ClickHouse.
     */
    private void writeKeywordStats(StreamTableEnvironment tEnv, Table stats) {
        tEnv
                .toRetractStream(stats, KeywordStats.class)
                .filter(t -> t.f0)  // f0 == true marks an add (insert) message
                .map(t -> t.f1)
                .addSink(FlinkSinkUtil.getClickhouseSink("gmall2021", "keyword_stats_2021", KeywordStats.class));
    }
}
