package com.atguigu.gmall.realtime.app.dws;

import com.atguigu.gmall.realtime.app.BaseSQLApp;
import com.atguigu.gmall.realtime.bean.KeyWordStats;
import com.atguigu.gmall.realtime.common.Constant;
import com.atguigu.gmall.realtime.function.IkAnalyzer;
import com.atguigu.gmall.realtime.util.FlinkSinkUtil;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author lzc
 * @Date 2022/4/24 9:10
 */
public class SearchKeywordStatsApp extends BaseSQLApp {
    public static void main(String[] args) {
        // NOTE(review): init(...) is declared in BaseSQLApp (not visible here).
        // The arguments appear to be (checkpoint/app id, web-UI or rest port,
        // parallelism, job name) — confirm against BaseSQLApp before relying on this.
        new SearchKeywordStatsApp().init("SearchKeywordStatsApp", 4004, 1, "SearchKeywordStatsApp");
    }
    
    /**
     * Builds the search-keyword statistics pipeline:
     * Kafka DWD page log -> filter keyword-search page views -> split the search
     * phrase into words with the IK analyzer UDF -> count per word in 5-second
     * tumbling event-time windows -> write the aggregates to ClickHouse.
     *
     * @param tEnv the StreamTableEnvironment prepared by {@link BaseSQLApp}
     */
    @Override
    public void handle(StreamTableEnvironment tEnv) {
        
        // Source table over the DWD page-view topic. The JSON "page" object is
        // read as a map<string,string>; "et" is derived from the epoch-millis
        // "ts" field and carries a 3-second bounded-out-of-orderness watermark.
        tEnv.executeSql("create table page(" +
                            "   page map<string, string>, " +
                            "   ts bigint, " +
                            "   et as to_timestamp_ltz(ts, 3)," +
                            "   watermark for et as et - interval '3' second" +
                            ")with(" +
                            "  'connector' = 'kafka', " +
                            "  'properties.bootstrap.servers' = '" + Constant.KAFKA_BROKERS + "', " +
                            "  'properties.group.id' = 'SearchKeywordStatsApp', " +
                            "  'topic' = '" + Constant.TOPIC_DWD_PAGE + "', " +
                            "  'scan.startup.mode' = 'latest-offset', " +
                            "  'format' = 'json'" +
                            ")");
        
        // 1. Keep only keyword-search page views: page_id 'good_list' with a
        //    non-null item of type 'keyword'. The item itself is the raw search phrase.
        Table t1 = tEnv.sqlQuery("select" +
                                     " page['item'] kw, " +
                                     " et " +
                                     "from page " +
                                     "where page['page_id'] = 'good_list' " +
                                     "and page['item_type'] = 'keyword' " +
                                     "and page['item'] is not null");
        tEnv.createTemporaryView("t1", t1);
        
        // 2. Tokenize the search phrase into individual words.
        // UDF categories: scalar, table, aggregate, table-aggregate; IkAnalyzer is
        // a table function — one input row fans out to one row per word.
        tEnv.createTemporaryFunction("ik_analyzer", IkAnalyzer.class);
        // Equivalent join spellings:
        // select .. from a join b on a.it=b.it
        // select .. from a, b where a.it=b.it
        // NOTE(review): the lateral join uses "on true" (cross join with the UDF
        // output), so rows whose phrase yields no tokens are dropped — assumed intended.
        Table t2 = tEnv.sqlQuery("select" +
                                     " word, " +
                                     " et " +
                                     "from t1 " +
                                     "join lateral table(ik_analyzer(kw)) on true");
        
        // 3. Windowed aggregation: count occurrences of each word per
        //    5-second tumbling event-time window (windowing TVF syntax).
        tEnv.createTemporaryView("t2", t2);
        
        // stt/edt are the window bounds formatted as strings and shifted from UTC
        // to Asia/Shanghai; 'search' tags the keyword source; ts is the emit time
        // in epoch millis (processing time, not event time).
        Table result = tEnv.sqlQuery("select" +
                                         " convert_tz(date_format(window_start, 'yyyy-MM-dd HH:mm:ss'), 'UTC', 'Asia/Shanghai') stt, " +
                                         " convert_tz(date_format(window_end, 'yyyy-MM-dd HH:mm:ss'), 'UTC', 'Asia/Shanghai') edt, " +
                                         " word keyword, " +
                                         " 'search' source, " +
                                         " count(*) ct, " +
                                         " unix_timestamp() *1000 ts " +
                                         "from table( tumble( table t2, descriptor(et), interval '5' second ) ) " +
                                         "group by word, window_start, window_end");
        
        // 4. Sink to ClickHouse: convert to a retract stream, keep only the
        //    insert/accumulate messages (t.f0 == true), and write each
        //    KeyWordStats row to table 'keyword_stats_2022'.
        tEnv
            .toRetractStream(result, KeyWordStats.class)
            .filter(t -> t.f0)
            .map(t -> t.f1)
            .addSink(FlinkSinkUtil.getClickHouseSink(Constant.CLICKHOUSE_DB, "keyword_stats_2022", KeyWordStats.class));
        
        
    }
}
