package app.dws;

import app.dwd.BaseAppSQL;
import bean.KeywordStats;
import common.Constant;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import function.KeywordUDTF;
import util.GmallSinkUtil;

/**
 * DWS-layer keyword statistics job.
 *
 * <p>Reads DWD page-log events from Kafka, keeps only "good_list" page views that
 * carry a search item, splits the search phrase into keywords with the IK-analyzer
 * UDTF, counts keyword occurrences per 5-second tumbling event-time window, and
 * writes the aggregated {@link KeywordStats} rows to ClickHouse.
 */
public class DWSKeyWordStatsApp extends BaseAppSQL {

    public static void main(String[] args) {
        // port 7236, parallelism 1, checkpoint/job name "DWSKeyWordStatsApp"
        new DWSKeyWordStatsApp().init(7236, 1, "DWSKeyWordStatsApp");
    }

    @Override
    protected void run(StreamTableEnvironment tEnv) {

        // 1. Declare a table over the Kafka DWD page-log topic.
        //    et is the event time derived from the epoch-millis ts column, with a
        //    5-second bounded-out-of-orderness watermark.
        //    FIX: removed the stray leading space that was concatenated into the
        //    topic literal (previously "' " + topic), which made the consumer
        //    subscribe to a topic name starting with a blank and read nothing.
        tEnv.executeSql("create table page_log(" +
                            "common map<string,string>," +
                            "page map<string,string>," +
                            "ts bigint," +
                            "et as to_timestamp(from_unixtime(ts/1000)) ," +
                            "watermark for et as et - interval '5' second" +
                            ")with(" +
                            "  'connector' = 'kafka'," +
                            "  'topic' = '" + Constant.TOPIC_DWD_PAGE_LOG + "'," +
                            "  'properties.bootstrap.servers' = 'hadoop162:9092'," +
                            "  'properties.group.id' = 'DWSKeyWordStatsApp'," +
                            "  'scan.startup.mode' = 'latest-offset'," +
                            "  'format' = 'json' " +
                            ")");

        // 2. Keep only searches: "good_list" page views with a non-null search phrase.
        Table table = tEnv.sqlQuery("select" +
                                        "  et," +
                                        "  page['item'] fullkey " +
                                        "  from page_log" +
                                        "  where page['page_id'] = 'good_list'" +
                                        "  and page['item'] is not null ");

        tEnv.createTemporaryView("t1", table);
        // Register the IK-analyzer word-splitting UDTF (emits column split_word).
        tEnv.createTemporaryFunction("ikanalyzer", KeywordUDTF.class);

        // 3. Explode each search phrase into keywords and count per tumbling window.
        //    FIX: the SELECT's tumble_start/tumble_end used a 5-second window while
        //    the GROUP BY used 10 seconds — Flink requires identical window specs,
        //    so the query could not be planned. Unified on 5 seconds to match the
        //    window bounds in the projection and the 5-second watermark.
        Table table1 = tEnv.sqlQuery("select " +
                                         " date_format(tumble_start(et,interval '5' second) ,'yyyy-MM-dd HH:mm:ss') stt," +
                                         " date_format(tumble_end(et,interval '5' second),'yyyy-MM-dd HH:mm:ss') edt," +
                                         " split_word keyword, " +
                                         " 'search' source, " +
                                         " count(*) ct, " +
                                         " unix_timestamp()*1000 ts " +
                                         " from t1 " +
                                         " join lateral table(ikanalyzer(fullkey)) on true " +
                                         " group by tumble(et,interval '5' second), " +
                                         " split_word ");

        // 4. Convert to a retract stream, keep only insert (f0 == true) messages,
        //    and sink the KeywordStats rows to ClickHouse.
        tEnv
            .toRetractStream(table1, KeywordStats.class)
            .filter(t -> t.f0)
            .map(t -> t.f1)
            .addSink(GmallSinkUtil.getClickHouseSink(Constant.CLICKHOUSE_DATABASE,
                                                     Constant.CLICKHOUSE_KEYWORD_STATS_2021,
                                                     KeywordStats.class));
    }
}
