package com.atguigu.gmall.realtime.app.dws;

import com.atguigu.gmall.realtime.app.BaseSqlApp;
import com.atguigu.gmall.realtime.bean.KeywordStats;
import com.atguigu.gmall.realtime.common.Constant;
import com.atguigu.gmall.realtime.function.IkAnalyzer;
import com.atguigu.gmall.realtime.util.FlinkSinkUtil;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author lizhenchao@atguigu.cn
 * @Date 2021/10/13 9:07
 */
/**
 * DWS-layer app: counts search keywords from the dwd page log.
 *
 * <p>Pipeline: Kafka page-log source (event time + 3s watermark)
 * → filter keyword searches on the goods-list page
 * → split keywords into words with the IK analyzer UDTF
 * → 10-second tumbling-window count per word
 * → write insert messages of the retract stream to ClickHouse.
 */
public class DwsSearchKeyWordStatsApp extends BaseSqlApp {

    public static void main(String[] args) {
        new DwsSearchKeyWordStatsApp().init(4004, 1, "DwsSearchKeyWordStatsApp");
    }

    @Override
    public void run(StreamTableEnvironment tenv) {
        registerPageLogSource(tenv);
        registerFilteredSearches(tenv);
        registerSplitWords(tenv);
        Table windowedCounts = countWordsPerWindow(tenv);
        sinkToClickHouse(tenv, windowedCounts);
    }

    /**
     * Registers the Kafka-backed dwd page-log table. Event time {@code et} is
     * derived from the epoch-millis {@code ts} column with a 3-second watermark.
     */
    private void registerPageLogSource(StreamTableEnvironment tenv) {
        tenv.executeSql("create table page_log(" +
                            "   common map<string, string>, " +
                            "   page map<string, string>," +
                            "   ts bigint, " +
                            "   et as to_timestamp(from_unixtime(ts/1000))," +
                            "   watermark for et as et - interval '3' second " +
                            ")with(" +
                            "   'connector' = 'kafka', " +
                            "   'properties.bootstrap.servers' = 'hadoop162:9092,hadoop163:9092,hadoop164:9092', " +
                            "   'properties.group.id' = 'DwsSearchKeyWordStatsApp', " +
                            "   'topic' = '" + Constant.TOPIC_DWD_PAGE + "', " +
                            // NOTE(review): 'latest-offset' always starts from the latest
                            // record, ignoring committed group offsets; to resume from the
                            // last committed position use 'group-offsets' instead.
                            "   'scan.startup.mode' = 'latest-offset', " +
                            "   'format' = 'json' " +
                            ")");
    }

    /** Step 1: keep only keyword-search events from the goods-list page, as view {@code t1}. */
    private void registerFilteredSearches(StreamTableEnvironment tenv) {
        Table searches = tenv.sqlQuery("select" +
                                           " page['item'] keyword, " +
                                           " et " +
                                           "from page_log " +
                                           "where page['page_id']='good_list' " +
                                           " and page['item'] is not null " +
                                           " and page['item_type']='keyword'");
        tenv.createTemporaryView("t1", searches);
    }

    /**
     * Step 2: registers the IK analyzer UDTF and explodes each keyword into
     * individual words (one row per word), as view {@code t2}.
     */
    private void registerSplitWords(StreamTableEnvironment tenv) {
        tenv.createTemporaryFunction("ik_analyzer", IkAnalyzer.class);
        Table words = tenv.sqlQuery("select " +
                                        " keyword, " +
                                        " word, " +
                                        " et " +
                                        "from t1 " +
                                        "join lateral table(ik_analyzer(keyword)) on true");
        tenv.createTemporaryView("t2", words);
    }

    /**
     * Step 3: counts each word within 10-second tumbling event-time windows.
     * {@code ts} is the wall-clock emit time in millis; {@code source} is fixed
     * to 'search' since this app only covers search keywords.
     */
    private Table countWordsPerWindow(StreamTableEnvironment tenv) {
        return tenv.sqlQuery("select " +
                                 " date_format(tumble_start(et, interval '10' second), 'yyyy-MM-dd HH:mm:ss') stt, " +
                                 " date_format(tumble_end(et, interval '10' second), 'yyyy-MM-dd HH:mm:ss') edt, " +
                                 " word keyword, " +
                                 " 'search' source, " +
                                 " count(word) ct, " +
                                 " unix_timestamp() *1000 ts " +
                                 "from t2 " +
                                 "group by " +
                                 " word, tumble(et, interval '10' second)");
    }

    /**
     * Step 4: converts the aggregate result to a retract stream, drops
     * retraction (delete) messages, and writes the add messages to ClickHouse.
     */
    private void sinkToClickHouse(StreamTableEnvironment tenv, Table result) {
        tenv
            .toRetractStream(result, KeywordStats.class)
            .filter(t -> t.f0)   // keep only add (insert) messages
            .map(t -> t.f1)
            .addSink(FlinkSinkUtil.getClickHouseSink(
                "gmall2021", "keyword_stats_2021", KeywordStats.class
            ));
    }
}
