package com.atguigu.realtime.app.dws;

import com.atguigu.realtime.app.BaseSqlApp;
import com.atguigu.realtime.common.GmallConstant;
import com.atguigu.realtime.function.KeywordUdtf;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author lizhenchao@atguigu.cn
 * @Date 2021/4/27 11:38
 */
/**
 * DWS-layer keyword statistics job.
 *
 * <p>Pipeline: read page-view events from the Kafka topic {@code dwd_page}, keep
 * searches on the good-list page, split the search phrase into keywords with the
 * IK-analyzer table function, aggregate counts per keyword in 10-second tumbling
 * event-time windows, and sink the result into ClickHouse table
 * {@code gmall2021.keyword_stats_2021}.
 */
public class DWSKeyWordStatsApp extends BaseSqlApp {
    public static void main(String[] args) {
        // port 4004, parallelism 2, job/group name "DWSKeyWordStatsApp"
        new DWSKeyWordStatsApp().init(4004, 2, "DWSKeyWordStatsApp");
    }
    
    @Override
    protected void run(StreamTableEnvironment tEnv) {
        // 1. Source table over Kafka topic dwd_page.
        //    rowtime is derived from the epoch-millis field ts; watermark lags 2s.
        tEnv.executeSql("CREATE TABLE page_view (" +
                            "   common MAP<STRING,STRING>, " +
                            "   page MAP<STRING,STRING>," +
                            "   ts BIGINT, " +
                            "   rowtime AS TO_TIMESTAMP(FROM_UNIXTIME(ts/1000, 'yyyy-MM-dd HH:mm:ss'))," +
                            "   WATERMARK FOR  rowtime  AS  rowtime - INTERVAL '2' SECOND " +
                            ") WITH(" +
                            "   'connector' = 'kafka'," +
                            // fixed: was hadoop162:9029 (port typo); all brokers listen on 9092
                            "   'properties.bootstrap.servers' = 'hadoop162:9092,hadoop163:9092,hadoop164:9092'," +
                            "   'properties.group.id' = 'DWSKeyWordStatsApp'," +
                            "   'topic' = 'dwd_page'," +
                            "   'scan.startup.mode' = 'earliest-offset'," +
                            "   'format' = 'json'" +
                            ")");
        
        // 2. Keep only search events: a non-null search phrase on the good-list page.
        Table t1 = tEnv.sqlQuery("select" +
                                     " page['item'] keyword, " +
                                     " rowtime " +
                                     "from page_view " +
                                     "where page['item'] is not null " +
                                     "and page['page_id']='good_list'");
        tEnv.createTemporaryView("t1", t1);
        
        // 3. Tokenize each search phrase into individual keywords.
        // 3.1 Register the IK-analyzer UDTF.
        tEnv.createTemporaryFunction("ik_analyzer", KeywordUdtf.class);
        // 3.2 Cross-join each row with its tokenized keywords (one row per token).
        Table t2 = tEnv.sqlQuery("select " +
                                     " kw, " +
                                     " rowtime " +
                                     "from t1  " +
                                     "join lateral table(ik_analyzer(keyword)) as T(kw) " +
                                     "on true");
        tEnv.createTemporaryView("t2", t2);
        
        // 4. 10-second tumbling window, count per keyword.
        //    ts is the processing timestamp in epoch millis
        //    (fixed: was unix_timestamp()*100, which produced values off by 10x).
        Table resultTable = tEnv.sqlQuery("select" +
                                        "   date_format(tumble_start(rowtime, interval '10' second), 'yyyy-MM-dd HH:mm:ss') stt, " +
                                        "   date_format(tumble_end(rowtime, interval '10' second), 'yyyy-MM-dd HH:mm:ss') edt, " +
                                        "   kw," +
                                        "   '" + GmallConstant.KEYWORD_SEARCH + "' source, " +
                                        "   count(*) ct," +
                                        "   unix_timestamp()*1000 ts " +
                                        "from t2 " +
                                        "group by " +
                                        "   tumble(rowtime, interval '10' second), kw");
    
        // 5. Sink table backed by ClickHouse.
        tEnv.executeSql("create table keyword_stats_2021(" +
                            "   stt string," +
                            "   edt string," +
                            "   keyword string," +
                            "   source string," +
                            "   ct bigint," +
                            "   ts bigint," +
                            "   PRIMARY KEY (stt, edt, keyword, source) NOT ENFORCED" +
                            ")with(" +
                            "   'connector' = 'clickhouse', " +
                            "   'url' = 'clickhouse://hadoop162:8123', " +
                            "   'database-name' = 'gmall2021', " +
                            "   'table-name' = 'keyword_stats_2021'," +
                            "   'sink.batch-size' = '100', " +
                            "   'sink.flush-interval' = '1000', " +
                            "   'sink.max-retries' = '3' " +
                            ")");
        
        // 6. Write the aggregated rows into ClickHouse (positional insert; the
        //    aliases kw/keyword differ by name but the column order matches).
        //    executeInsert replaces the fragile string-concatenation of the Table
        //    object, which relied on Table.toString() registering an anonymous view.
        resultTable.executeInsert("keyword_stats_2021");
    }
}
