package com.atguigu.gmall.realtime.app.dws;

import com.atguigu.gmall.realtime.app.BaseSqlApp;
import com.atguigu.gmall.realtime.udf.KeyWordProductUdtf;
import com.atguigu.gmall.realtime.udf.KeyWordUdtf;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author lizhenchao@atguigu.cn
 * @Date 2021/3/26 9:23
 */
public class DWSProductKeyWordStatsApp extends BaseSqlApp {

    /** Entry point: launches the job on port 30010 with parallelism 2 under a fixed job name. */
    public static void main(String[] args) {
        new DWSProductKeyWordStatsApp().init(30010, 2, "DWSProductKeyWordStatsApp");
    }

    /**
     * Reads product-level statistics from the dws Kafka topic, explodes each
     * spu_name into search keywords via the IK-analyzer UDTF, aggregates the
     * click/cart/order counts per (keyword, window), pivots the three metric
     * columns into (source, ct) rows, and writes the result to ClickHouse.
     *
     * @param tenv the streaming table environment provided by {@code BaseSqlApp}
     */
    @Override
    public void run(StreamTableEnvironment tenv) {
        // Source table: product stats published by the upstream dws job on Kafka.
        final String sourceDdl = "create table product_stats(" +
            "   spu_name string, " +
            "   click_ct bigint,  " +
            "   cart_ct bigint,  " +
            "   order_ct bigint,  " +
            "   stt string, " +
            "   edt string " +
            ")with(" +
            "   'connector' = 'kafka'," +
            "   'topic' = 'dws_product_stats'," +
            "   'properties.bootstrap.servers' = 'hadoop162:9092,hadoop163:9092,hadoop164:9092'," +
            "   'properties.group.id' = 'DWSProductKeyWordStatsApp'," +
            "   'scan.startup.mode' = 'latest-offset'," +
            "   'format' = 'json'" +
            ")";
        tenv.executeSql(sourceDdl);

        // Tokenize spu_name into one row per keyword using the IK analyzer UDTF.
        tenv.createTemporaryFunction("ik_analyzer", KeyWordUdtf.class);
        final String splitSql = "select " +
            "    keyword, " +
            "    click_ct, " +
            "    cart_ct, " +
            "    order_ct ,  " +
            "    stt , " +
            "    edt  " +
            "from product_stats, " +
            "lateral table (ik_analyzer(spu_name)) as T(keyword) ";
        final Table keywordTable = tenv.sqlQuery(splitSql);
        tenv.createTemporaryView("t1", keywordTable);

        // Step 1: aggregate the three counters per keyword and window (stt, edt).
        final String aggSql = "select " +
            "    keyword, " +
            "    stt, " +
            "    edt,  " +
            "    sum(click_ct) click_ct, " +
            "    sum(cart_ct) cart_ct, " +
            "    sum(order_ct) order_ct " +
            "from t1 " +
            "group by keyword, stt, edt";
        final Table aggregatedTable = tenv.sqlQuery(aggSql);
        tenv.createTemporaryView("t2", aggregatedTable);

        // Step 2: pivot columns to rows — the three metrics of one row become
        // three (source, ct) rows via the keyword_product UDTF.
        tenv.createTemporaryFunction("keyword_product", KeyWordProductUdtf.class);
        final String pivotSql = "select" +
            " stt, " +
            " edt," +
            " keyword," +
            " source," +
            " ct," +
            " unix_timestamp()*1000 ts " +
            "from t2," +
            "lateral table(keyword_product(click_ct, order_ct, cart_ct)) as T(source, ct)";
        final Table resultTable = tenv.sqlQuery(pivotSql);

        // Sink table: upsert into ClickHouse keyed on (stt, edt, keyword, source).
        final String sinkDdl = "create table keyword_stats_2021(" +
            "   stt string," +
            "   edt string," +
            "   keyword string," +
            "   source string," +
            "   ct bigint," +
            "   ts bigint," +
            "   PRIMARY KEY (stt, edt, keyword, source) NOT ENFORCED" +
            ")with(" +
            "   'connector' = 'clickhouse', " +
            "   'url' = 'clickhouse://hadoop162:8123', " +
            "   'database-name' = 'gmall2021', " +
            "   'table-name' = 'keyword_stats_2021'," +
            "   'sink.batch-size' = '100', " +
            "   'sink.flush-interval' = '1000', " +
            "   'sink.max-retries' = '3' " +
            ")";
        tenv.executeSql(sinkDdl);

        resultTable.executeInsert("keyword_stats_2021");
    }
}
