package com.atguigu.realtime.app.dws;

import com.atguigu.realtime.app.BaseSqlApp;
import com.atguigu.realtime.function.KeywordProductUdtf;
import com.atguigu.realtime.function.KeywordUdtf;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author lizhenchao@atguigu.cn
 * @Date 2021/4/27 11:38
 */
/**
 * Keyword-level product statistics job.
 *
 * <p>Reads product-level stats from the {@code dws_product_stats} Kafka topic,
 * tokenizes {@code spu_name} into search keywords, sums the click/cart/order
 * counters per keyword and window, pivots the three counters into
 * {@code (source, ct)} rows, and writes the result to ClickHouse.
 */
public class DWSKeyWordProductStatsApp extends BaseSqlApp {
    public static void main(String[] args) {
        // init(port, parallelism, appName) — provided by BaseSqlApp.
        new DWSKeyWordProductStatsApp().init(4005, 2, "DWSKeyWordProductStatsApp");
    }

    @Override
    protected void run(StreamTableEnvironment tEnv) {

        // Source: product stats stream from Kafka, read from the earliest offset.
        String sourceDdl =
            "create table product_stats (" +
            "   spu_name STRING, " +
            "   click_ct BIGINT," +
            "   cart_ct BIGINT," +
            "   order_ct BIGINT," +
            "   stt STRING," +
            "   edt STRING" +
            ") with(" +
            "   'connector' = 'kafka'," +
            "   'properties.bootstrap.servers' = 'hadoop162:9092,hadoop163:9092,hadoop164:9092'," +
            "   'properties.group.id' = 'DWSKeyWordProductStatsApp'," +
            "   'topic' = 'dws_product_stats'," +
            "   'scan.startup.mode' = 'earliest-offset'," +
            "   'format' = 'json'" +
            ")";
        tEnv.executeSql(sourceDdl);

        // Step 1: tokenize spu_name — one output row per extracted keyword.
        tEnv.createTemporaryFunction("ik_analyzer", KeywordUdtf.class);
        Table keywordTable = tEnv.sqlQuery(
            "select " +
            " keyword, " +
            " click_ct," +
            " cart_ct," +
            " order_ct," +
            " stt," +
            " edt " +
            "from product_stats, " +
            "lateral table (ik_analyzer(spu_name)) as T(keyword)");
        tEnv.createTemporaryView("t1", keywordTable);

        // Step 2: aggregate the three counters per (keyword, window).
        Table aggregatedTable = tEnv.sqlQuery(
            "select " +
            " stt, " +
            " edt, " +
            " keyword, " +
            " sum(click_ct) click_ct," +
            " sum(cart_ct) cart_ct," +
            " sum(order_ct) order_ct " +
            "from t1 " +
            "group by keyword,stt,edt");
        tEnv.createTemporaryView("t2", aggregatedTable);

        // Step 3: pivot the three counters into 3 rows of (source, ct).
        // NOTE(review): no column alias here — assumes KeywordProductUdtf declares
        // output fields named `source` and `ct`; confirm against the UDTF.
        tEnv.createTemporaryFunction("kw_product", KeywordProductUdtf.class);
        Table resultTable = tEnv.sqlQuery(
            "select " +
            " stt," +
            " edt, " +
            " keyword, " +
            " source, " +
            " ct, " +
            " unix_timestamp()*1000 ts " +
            "from t2, " +
            "lateral table (kw_product(click_ct, cart_ct, order_ct))");

        // Sink: ClickHouse table keyed on (stt, edt, keyword, source).
        String sinkDdl =
            "create table keyword_stats_2021(" +
            "   stt string," +
            "   edt string," +
            "   keyword string," +
            "   source string," +
            "   ct bigint," +
            "   ts bigint," +
            "   PRIMARY KEY (stt, edt, keyword, source) NOT ENFORCED" +
            ")with(" +
            "   'connector' = 'clickhouse', " +
            "   'url' = 'clickhouse://hadoop162:8123', " +
            "   'database-name' = 'gmall2021', " +
            "   'table-name' = 'keyword_stats_2021'," +
            "   'sink.batch-size' = '1', " +
            "   'sink.flush-interval' = '1000', " +
            "   'sink.max-retries' = '3' " +
            ")";
        tEnv.executeSql(sinkDdl);

        // Step 4: emit the pivoted rows into ClickHouse.
        resultTable.executeInsert("keyword_stats_2021");
    }

}
