package com.atguigu.gmall.realtime.app.dws;

import com.atguigu.gmall.realtime.app.BaseSqlApp;
import com.atguigu.gmall.realtime.common.GmallConstant;
import com.atguigu.gmall.realtime.udf.KeyWordUdtf;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWS-layer keyword statistics job: consumes page logs from the Kafka topic
 * {@code dwd_page_log}, keeps search keywords entered on the 'good_list' page,
 * splits each keyword into words with an IK-analyzer UDTF, counts occurrences
 * per 10-second tumbling event-time window, and sinks the result rows into the
 * ClickHouse table {@code keyword_stats_2021}.
 *
 * @Author lizhenchao@atguigu.cn
 * @Date 2021/3/24 15:05
 */
public class DWSKeyWordStatsApp extends BaseSqlApp {
    public static void main(String[] args) {
        // port 30004, parallelism 2, job/consumer-group id "DWSKeyWordStatsApp"
        new DWSKeyWordStatsApp().init(30004, 2, "DWSKeyWordStatsApp");
    }

    @Override
    public void run(StreamTableEnvironment tenv) {
        // Register the IK-analyzer table function used below to split keywords.
        tenv.createTemporaryFunction("ik_analyzer", KeyWordUdtf.class);

        // Kafka source table over dwd_page_log. The event-time column `et` is
        // derived from the epoch-millisecond `ts` field; watermark lags 5s.
        String sourceDdl =
            "create table page_view (" +
            "  common map<string, string> , " +
            "  page map<string, string> , " +
            "  ts bigint, " +
            "  et as to_timestamp(from_unixtime(ts / 1000, 'yyyy-MM-dd HH:mm:ss')), " +
            "  watermark for et as et - interval '5' second" +
            ") with(" +
            "   'connector' = 'kafka'," +
            "   'properties.bootstrap.servers' = 'hadoop162:9092,hadoop163:9092,hadoop164:9092'," +
            "   'properties.group.id' = 'DWSKeyWordStatsApp'," +
            "   'topic' = 'dwd_page_log'," +
            "   'scan.startup.mode' = 'latest-offset'," +
            "   'format' = 'json'" +
            ")";
        tenv.executeSql(sourceDdl);

        // Step 1: keep only rows that carry a search keyword, i.e. page['item']
        // is present and the page is the 'good_list' (search result) page.
        Table filteredKeywords = tenv.sqlQuery(
            "select " +
            " page['item'] keyword, " +
            " et " +
            "from page_view " +
            "where page['item'] is not null " +
            "and page['page_id']='good_list' ");
        tenv.createTemporaryView("t1", filteredKeywords);

        // Step 2: explode each keyword into individual words via the UDTF
        // (one output row per word, keeping the event time).
        Table explodedWords = tenv.sqlQuery(
            "select " +
            " kw, " +
            " et " +
            "from t1, " +
            "lateral table(ik_analyzer(keyword)) as T(kw)");
        tenv.createTemporaryView("t2", explodedWords);

        // Step 3: count each word per 10-second tumbling event-time window.
        // `source` is the constant SEARCH tag; `ts` is the processing-time
        // epoch in milliseconds at emission.
        Table windowedCounts = tenv.sqlQuery(
            "select" +
            "  date_format(tumble_start(et, interval '10' second), 'yyyy-MM-dd HH:mm:ss') stt, " +
            "  date_format(tumble_end(et, interval '10' second), 'yyyy-MM-dd HH:mm:ss') edt, " +
            "  kw keyword, " +
            " '" + GmallConstant.KEYWORD_SEARCH + "' source, " +
            " count(*) ct, " +
            " unix_timestamp()*1000 ts " +
            "from t2 " +
            "group by " +
            " tumble(et, interval '10' second), kw ");

        // Step 4: sink table backed by ClickHouse; primary key declared for
        // upsert semantics (NOT ENFORCED, as required by Flink SQL).
        String sinkDdl =
            "create table keyword_stats_2021(" +
            "   stt string," +
            "   edt string," +
            "   keyword string," +
            "   source string," +
            "   ct bigint," +
            "   ts bigint," +
            "   PRIMARY KEY (stt, edt, keyword, source) NOT ENFORCED" +
            ")with(" +
            "   'connector' = 'clickhouse', " +
            "   'url' = 'clickhouse://hadoop162:8123', " +
            "   'database-name' = 'gmall2021', " +
            "   'table-name' = 'keyword_stats_2021'," +
            "   'sink.batch-size' = '100', " +
            "   'sink.flush-interval' = '1000', " +
            "   'sink.max-retries' = '3' " +
            ")";
        tenv.executeSql(sinkDdl);

        windowedCounts.executeInsert("keyword_stats_2021");
    }
}
