package com.atgugu.realtime.app.dws;

import com.atgugu.realtime.app.BaseSqlApp;
import com.atgugu.realtime.bean.KeywordStats;
import com.atgugu.realtime.common.Constant;
import com.atgugu.realtime.function.IkAnalyzer;
import com.atgugu.realtime.util.FlinkSinkUtil;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.ZoneOffset;

/**
 * @Author lizhenchao@atguigu.cn
 * @Date 2022/1/5 9:12
 */
/**
 * DWS-layer job: computes search-keyword statistics.
 *
 * <p>Pipeline: read page-log events from the Kafka topic {@code dwd_page},
 * keep only keyword searches on the goods-list page, split each search phrase
 * into individual keywords with a custom IK-analyzer table function, count
 * occurrences per keyword in 5-second tumbling windows, and sink the result
 * rows into ClickHouse.
 */
public class DwsSearchKeywordStatsApp extends BaseSqlApp {

    public static void main(String[] args) {
        // Port 4004, parallelism 1, job name used as checkpoint/app identifier.
        new DwsSearchKeywordStatsApp().init(4004, 1, "DwsSearchKeywordStatsApp");
    }

    @Override
    protected void run(StreamTableEnvironment tEnv) {
        // All window/time functions interpret timestamps in UTC+8.
        tEnv.getConfig().setLocalTimeZone(ZoneOffset.ofHours(8));

        // Step 1: declare a dynamic table over the Kafka topic dwd_page.
        // `et` is an event-time column derived from the epoch-millis field `ts`,
        // with a 3-second watermark delay for out-of-order events.
        final String createPageTable =
            "create table dwd_page(" +
            "   page map<string, string>, " +
            "   ts bigint, " +
            "   et as to_timestamp_ltz(ts, 3), " +
            "   watermark for et as et - interval '3' second " +
            ")with(" +
            "   'connector'='kafka', " +
            "   'properties.bootstrap.servers'='" + Constant.KAFKA_BROKERS + "', " +
            "   'properties.group.id'='DwsSearchKeywordStatsApp', " +
            "   'topic'='" + Constant.TOPIC_DWD_PAGE + "', " +
            "   'format'='json', " +
            "   'scan.startup.mode'='earliest-offset' " +
            ")";
        tEnv.executeSql(createPageTable);

        // Step 2: keep only keyword-search events on the goods-list page;
        // expose the raw search phrase as `kw` alongside the event time.
        final String filterSearches =
            "select" +
            " page['item'] kw, " +
            " et " +
            "from dwd_page " +
            "where page['page_id']='good_list' " +
            "and page['item_type']='keyword' " +
            "and page['item'] is not null ";
        Table searchEvents = tEnv.sqlQuery(filterSearches);
        tEnv.createTemporaryView("t1", searchEvents);

        // Step 3: tokenize each search phrase with the IK analyzer UDTF.
        // Register the table function, then lateral-join it so every phrase
        // fans out into one row per extracted keyword.
        tEnv.createTemporaryFunction("ik_analyzer", IkAnalyzer.class);
        final String splitKeywords =
            "select " +
            " keyword, " +
            " et " +
            "from t1 " +
            "join lateral table(ik_analyzer(kw)) on true";
        Table keywordRows = tEnv.sqlQuery(splitKeywords);
        tEnv.createTemporaryView("t2", keywordRows);

        // Step 4: count keywords per 5-second tumbling window; `source` is the
        // fixed literal 'search' and `ts` is the processing-time write stamp.
        final String windowedCounts =
            "select" +
            " date_format(tumble_start(et, interval '5' second), 'yyyy-MM-dd HH:mm:ss' )stt," +
            " date_format(tumble_end(et, interval '5' second), 'yyyy-MM-dd HH:mm:ss' )edt," +
            " keyword, " +
            " 'search' source, " +
            " count(*) ct," +
            " unix_timestamp() *1000 ts " +
            "from t2 " +
            "group by " +
            " tumble(et, interval '5' second), " +
            " keyword";
        Table aggregated = tEnv.sqlQuery(windowedCounts);

        // Step 5: convert to a retract stream, drop retraction (delete) rows,
        // and write the remaining insert rows into ClickHouse.
        tEnv.toRetractStream(aggregated, KeywordStats.class)
            .filter(t -> t.f0)
            .map(t -> t.f1)
            .addSink(FlinkSinkUtil.getClickHouseSink("gmall2021", "keyword_stats_2021", KeywordStats.class));
    }
}
