package com.pw.gmall.realtime.app.dws;

import com.pw.gmall.realtime.app.BaseSqlApp;
import com.pw.gmall.realtime.app.BaseSqlTestApp;
import com.pw.gmall.realtime.common.Constant;
import com.pw.gmall.realtime.entities.KeyWordStats;
import com.pw.gmall.realtime.function.IKAnalyzer;
import com.pw.gmall.realtime.utils.FlinkSinkUtils;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author: linux_future
 * @since: 2022/3/28
 **/
/**
 * DWS job: counts search keywords per 5-second tumbling window and writes the
 * result to ClickHouse.
 *
 * <p>Pipeline: dwd page-log Kafka topic -> filter keyword searches on the
 * 'good_list' page -> split the search phrase into words with the IK analyzer
 * UDTF -> tumbling-window count per word -> ClickHouse sink.
 */
public class SearchKeyWordStatsApp extends BaseSqlApp {
    public static void main(String[] args) {
        // port 2030, parallelism 1, checkpoint/group name "SearchKeyWordStatsApp"
        // (argument semantics assumed from BaseSqlApp.init — confirm against base class)
        new SearchKeyWordStatsApp().init(2030, 1, "SearchKeyWordStatsApp");
    }

    @Override
    protected void handle(StreamTableEnvironment tEnv) {
        // 1. Kafka source table with event time + 3s watermark.
        registerPageSource(tEnv);

        // 2. Keep only keyword searches that landed on the goods-list page.
        Table searched = tEnv.sqlQuery("select " +
                " page['item'] kw," +
                " et" +
                " from page" +
                " where page['page_id']='good_list'" +
                " and page['item_type']='keyword'" +
                " and page['item'] is not null");
        tEnv.createTemporaryView("t1", searched);

        // 3. Explode each search phrase into individual words via the IK
        //    analyzer table function (one output row per word).
        tEnv.createTemporaryFunction("ik_analyze", IKAnalyzer.class);
        Table tokenized = tEnv.sqlQuery("select " +
                " word," +
                " et" +
                " from t1" +
                " join lateral table(ik_analyze(kw)) on true");
        tEnv.createTemporaryView("t2", tokenized);

        // 4. 5-second tumbling-window count per word. Window bounds are shifted
        //    from UTC to Asia/Shanghai for display; the zone ID must be the
        //    case-sensitive 'UTC' (unknown IDs silently fall back to GMT).
        Table aggregated = tEnv.sqlQuery("select " +
                " CONVERT_TZ(date_format(window_start,'yyyy-MM-dd HH:mm:ss'),'UTC', 'Asia/Shanghai') stt," +
                " convert_tz(date_format(window_end,'yyyy-MM-dd HH:mm:ss'),'UTC','Asia/Shanghai') edt," +
                " word keyword," +
                " 'search' source," +
                " count(*) ct," +
                " unix_timestamp() * 1000 ts" +
                " from table(tumble(table t2,descriptor(et),interval '5' second))" +
                " group by word,window_start,window_end");

        // 5. Sink: the windowed aggregate only emits inserts, so keep the
        //    insert side (f0 == true) of the retract stream and write each
        //    KeyWordStats row to ClickHouse.
        tEnv.toRetractStream(aggregated, KeyWordStats.class)
                .filter(t -> t.f0)
                .map(t -> t.f1)
                .addSink(FlinkSinkUtils.getClickhouseSink(Constant.CLICKHOUSE_DB,
                        Constant.CLICKHOUSE_TABLE_KEYWORD_STATS,
                        KeyWordStats.class));
    }

    /**
     * Declares the {@code page} Kafka source table over the dwd page-log topic.
     * {@code et} is derived from the epoch-millis {@code ts} column and carries
     * a 3-second out-of-orderness watermark.
     */
    private void registerPageSource(StreamTableEnvironment tEnv) {
        tEnv.executeSql("CREATE TABLE page (" +
                "  common MAP<STRING,STRING>," +
                "  page MAP<string,string>," +
                "  ts bigint," +
                "  et  AS TO_TIMESTAMP(FROM_UNIXTIME(ts/1000, 'yyyy-MM-dd HH:mm:ss'))," +
                "  watermark for et as et - interval '3' second " +
                ") WITH (" +
                "  'connector' = 'kafka'," +
                "  'topic' = '" + Constant.TOPIC_DWD_PAGE_LOG + "'," +
                "  'properties.bootstrap.servers' = 'hadoop162:9092,hadoop163:9092,hadoop164:9092'," +
                "  'properties.group.id' = 'SearchKeyWordStatsApp'," +
                "  'scan.startup.mode' = 'earliest-offset'," +
                "  'format' = 'json'" +
                ")");
    }

}
