package com.atguigu.gmall.app.dws;

import com.atguigu.gmall.app.BaseSqlApp;
import com.atguigu.gmall.bean.KeywordStats;
import com.atguigu.gmall.common.Constant;
import com.atguigu.gmall.function.KWUdtf;
import com.atguigu.gmall.util.FlinkSinkUtil;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author lizhenchao@atguigu.cn
 * @Date 2021/8/7 9:11
 */
public class DwsKeyWordSearchStatsApp extends BaseSqlApp {
    public static void main(String[] args) {
        new DwsKeyWordSearchStatsApp().init(4004, 1, "DwsKeyWordSearchStatsApp");
    }
    
    /**
     * Computes hot search keywords: reads dwd page logs from Kafka, keeps search
     * records, splits each keyword into words with the IK analyzer UDTF, counts
     * words per 5-second tumbling event-time window, and sinks to ClickHouse.
     */
    @Override
    protected void run(StreamTableEnvironment tEnv) {
        // Kafka source over the dwd page-log topic. The event-time attribute `et`
        // is derived from `ts` (epoch millis, hence ts/1000) with a 10-second
        // out-of-orderness watermark.
        final String pageLogDdl =
            "create table page_log(" +
            "   common map<string, string>, " +
            "   page map<string, string>, " +
            "   ts bigint, " +
            "   et as to_timestamp(from_unixtime(ts/1000)), " +
            "   watermark for et as et - interval '10' second " +
            ")with(" +
            "   'connector' = 'kafka', " +
            "   'properties.bootstrap.servers' = 'hadoop162:9092,hadoop163:9092, hadoop164:9092', " +
            "   'properties.group.id' = 'DwsKeyWordSearchStatsApp', " +
            "   'topic' = '" + Constant.TOPIC_DWD_PAGE + "', " +
            "   'scan.startup.mode' = 'earliest-offset', " +
            "   'format' = 'json' " +
            ")";
        tEnv.executeSql(pageLogDdl);
        
        // Step 1: keep only search records — pages with page_id 'good_list'
        // that actually carry a keyword in page['item'].
        final String searchFilterSql =
            "select" +
            " page['item'] keyword, " +
            " et " +
            "from page_log " +
            "where page['page_id'] = 'good_list' and " +
            "page['item'] is not null";
        Table searchRecords = tEnv.sqlQuery(searchFilterSql);
        tEnv.createTemporaryView("t1", searchRecords);
        
        // Step 2: register the word-splitting UDTF, then explode every keyword
        // into individual words via a lateral join.
        tEnv.createTemporaryFunction("ik_analyzer", KWUdtf.class);
        final String splitSql =
            "select" +
            " word, " +
            " et " +
            "from t1 " +
            "join lateral table(ik_analyzer(keyword)) on true";
        Table splitWords = tEnv.sqlQuery(splitSql);
        tEnv.createTemporaryView("t2", splitWords);
        
        // Step 3: count each word per 5-second tumbling window; 'search' tags
        // the source of the keyword, ts is the processing wall-clock in millis.
        final String statsSql =
            "select" +
            " date_format(tumble_start(et, interval '5' second), 'yyyy-MM-dd HH:mm:ss') stt, " +
            " date_format(tumble_end(et, interval '5' second), 'yyyy-MM-dd HH:mm:ss') edt, " +
            " word keyword, " +
            " 'search' source, " +
            " count(*) ct," +
            " unix_timestamp() * 1000 ts " +
            "from t2 " +
            "group by word, tumble(et, interval '5' second)";
        Table statsTable = tEnv.sqlQuery(statsSql);
        
        // Step 4: convert to a retract stream, keep only the insert (f0 == true)
        // side, and write the rows into ClickHouse.
        tEnv.toRetractStream(statsTable, KeywordStats.class)
            .filter(t -> t.f0)
            .map(t -> t.f1)
            .addSink(FlinkSinkUtil.getClickHouseSink(Constant.CLICKHOUSE_DB, Constant.TABLE_KEYWORD_STATS, KeywordStats.class));
    }
}
