package app.dws;

import app.dwd.BaseAppSQL;
import bean.KeywordStats;
import common.Constant;
import function.KeywordUDTF;
import function.kwProductUDTF;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import util.GmallSinkUtil;

import java.util.HashSet;
import java.util.Set;
import java.util.TreeSet;

/**
 * DWS-layer streaming job: derives per-keyword product statistics.
 *
 * <p>Pipeline: read aggregated product stats from Kafka -> keep rows with at
 * least one non-zero activity count -> split {@code sku_name} into keywords
 * with the {@code ikanalyzer} UDTF -> aggregate counts per (window, keyword)
 * -> explode each count type into (source, ct) rows with {@code kwProductUDTF}
 * -> sink the retract stream's insert records to ClickHouse.
 */
public class DWSProductKeywordStatsApp extends BaseAppSQL {

    public static void main(String[] args) {
        // init(port, parallelism, jobName) — provided by BaseAppSQL.
        new DWSProductKeywordStatsApp().init(7534, 1, "DWSProductKeywordStatsApp");
    }

    @Override
    protected void run(StreamTableEnvironment tEnv) {

        // 1. Source table over the Kafka product-stats topic.
        //    BUGFIX: the original wrote 'topic' = ' <name>' with a stray leading
        //    space inside the literal, subscribing to a non-existent topic name.
        tEnv.executeSql("create table productStats(" +
                        "  stt string," +
                        "  edt string," +
                        "  sku_name string," +
                        "  click_ct bigint," +
                        "  order_ct bigint," +
                        "  cart_ct bigint " +
                        ") with (" +
                        "  'connector' = 'kafka'," +
                        "  'topic' = '" + Constant.TOPIC_DWS_PRODUCT_STATS_2021 + "'," +
                        "  'properties.bootstrap.servers' = 'hadoop162:9092'," +
                        "  'properties.group.id' = 'DWSProductKeywordStatsApp'," +
                        "  'scan.startup.mode' = 'latest-offset'," +
                        "  'format' = 'json' " +
                        ")");

        // 2. Continuous query — keep only rows where at least one of the
        //    click/order/cart counts is positive (rows with no activity carry
        //    no keyword signal).
        //    BUGFIX: the original concatenated to "select*" with no space,
        //    which relies on lenient parser tokenization.
        Table filtered = tEnv.sqlQuery("select * " +
                                       " from productStats" +
                                       " where click_ct > 0 or order_ct > 0 or cart_ct > 0");
        tEnv.createTemporaryView("t1", filtered);

        // 3. Continuous query — split sku_name into keywords via the
        //    ikanalyzer table function; each input row fans out to one row
        //    per split_word.
        tEnv.createTemporaryFunction("ikanalyzer", KeywordUDTF.class);
        Table split = tEnv.sqlQuery("select " +
                                    "  stt, edt, split_word, click_ct, order_ct, cart_ct" +
                                    " from t1" +
                                    " join lateral table(ikanalyzer(sku_name)) on true");
        tEnv.createTemporaryView("t2", split);

        // 4. Continuous query — aggregate the counts per window and keyword.
        Table aggregated = tEnv.sqlQuery("select " +
                                         "  stt, edt, split_word," +
                                         "  sum(click_ct) click_ct," +
                                         "  sum(order_ct) order_ct," +
                                         "  sum(cart_ct) cart_ct" +
                                         " from t2 group by stt, edt, split_word");
        tEnv.createTemporaryView("t3", aggregated);

        // 5. Continuous query — explode the three count columns into
        //    (source, ct) rows via kwProductUDTF, stamping each row with the
        //    current time in milliseconds.
        tEnv.createTemporaryFunction("kwProductUDTF", kwProductUDTF.class);
        Table result = tEnv.sqlQuery("select " +
                                     "  stt, edt, split_word keyword, source, ct," +
                                     "  unix_timestamp() * 1000 ts" +
                                     " from t3" +
                                     " join lateral table(kwProductUDTF(click_ct, order_ct, cart_ct)) on true");

        // 6. The grouped aggregation emits a retract stream; keep only the
        //    insert (f0 == true) records and write them to ClickHouse.
        tEnv
            .toRetractStream(result, KeywordStats.class)
            .filter(t -> t.f0)
            .map(t -> t.f1)
            .addSink(GmallSinkUtil.getClickHouseSink(Constant.CLICKHOUSE_DATABASE,
                                                     Constant.CLICKHOUSE_KEYWORD_STATS_2021,
                                                     KeywordStats.class));
    }
}
