package com.atguigu.gmall.realtime.dws.app;


import com.atguigu.gmall.realtime.common.base.BaseSQLApp;
import com.atguigu.gmall.realtime.common.constant.Constant;
import com.atguigu.gmall.realtime.dws.function.KwSplit;
import com.atguigu.gmall.realtime.common.util.SQLUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author atguigu
 * @Date 2023/7/11 14:54
 */
public class Dws_01_DwsTrafficSourceKeywordPageViewWindow extends BaseSQLApp {
    public static void main(String[] args) {
        new Dws_01_DwsTrafficSourceKeywordPageViewWindow().start(
            40001,
            2,
            "Dws_01_DwsTrafficSourceKeywordPageViewWindow"
        );
    }
    
    /**
     * Builds the keyword-granularity page-view aggregation pipeline:
     * page log (Kafka) -> search-keyword filter -> word split (UDTF)
     * -> tumbling-window count -> Doris sink.
     *
     * @param env  stream execution environment (unused here; SQL-only job)
     * @param tEnv table environment all DDL/DML is issued against
     */
    @Override
    public void handle(StreamExecutionEnvironment env,
                       StreamTableEnvironment tEnv) {
        // 1. Read the dwd page log from Kafka
        createPageLogSource(tEnv);
        
        // 2. Keep only the rows that carry a search keyword
        extractSearchKeywords(tEnv);
        
        // 3. Split each raw keyword into words with the custom UDTF
        splitKeywords(tEnv);
        
        // 4. Windowed aggregation via the tumble window TVF
        Table result = aggregateByWindow(tEnv);
        
        // 5. Write the per-window keyword counts out to Doris
        writeToDoris(tEnv, result);
    }
    
    // Registers the Kafka-backed source table over the dwd page-log topic.
    // Event time `et` is derived from the ms-epoch `ts` column, with a
    // 3-second out-of-orderness watermark.
    private void createPageLogSource(StreamTableEnvironment tEnv) {
        tEnv.executeSql("create table page_log(" +
                            " page map<string, string>, " +
                            " ts bigint, " +
                            " et as to_timestamp_ltz(ts, 3), " +
                            " watermark for et as et - interval '3' second " +
                            ")" + SQLUtil.getKafkaDDLSource("PageViewWindow", Constant.TOPIC_DWD_TRAFFIC_PAGE));
    }
    
    // A search keyword is an event whose previous page was 'search' or 'home'
    // and whose item_type is 'keyword'; the keyword text itself is page['item'].
    // Result is exposed as the temporary view `kw_table`.
    private void extractSearchKeywords(StreamTableEnvironment tEnv) {
        Table kwTable = tEnv.sqlQuery("select " +
                                          "page['item'] kw, " +
                                          "et " +
                                          "from page_log " +
                                          "where ( page['last_page_id'] ='search' " +
                                          "        or page['last_page_id'] ='home' " +
                                          "       )" +
                                          "and page['item_type']='keyword' " +
                                          "and page['item'] is not null ");
        tEnv.createTemporaryView("kw_table", kwTable);
    }
    
    // Registers the custom table function (IK word splitter) and explodes each
    // raw keyword into one row per word via a lateral join. Result is exposed
    // as the temporary view `keyword_table`.
    private void splitKeywords(StreamTableEnvironment tEnv) {
        tEnv.createTemporaryFunction("kw_split", KwSplit.class);
        
        Table keywordTable = tEnv.sqlQuery("select " +
                                               " keyword, " +
                                               " et " +
                                               "from kw_table " +
                                               "join lateral table(kw_split(kw)) on true ");
        tEnv.createTemporaryView("keyword_table", keywordTable);
    }
    
    // Counts occurrences per keyword inside 5-second tumbling event-time
    // windows (window TVF); stt/edt/cur_date are string-formatted for the sink.
    private Table aggregateByWindow(StreamTableEnvironment tEnv) {
        return tEnv.sqlQuery("select " +
                                 " date_format(window_start, 'yyyy-MM-dd HH:mm:ss') stt, " +
                                 " date_format(window_end, 'yyyy-MM-dd HH:mm:ss') edt, " +
                                 " keyword," +
                                 " date_format(window_start, 'yyyyMMdd') cur_date, " +
                                 " count(*) keyword_count " +
                                 "from table( tumble(table keyword_table, descriptor(et), interval '5' second ) ) " +
                                 "group by window_start, window_end, keyword ");
    }
    
    // Declares the Doris sink table and streams the aggregated result into it.
    private void writeToDoris(StreamTableEnvironment tEnv, Table result) {
        tEnv.executeSql("create table dws_traffic_source_keyword_page_view_window(" +
                            "  stt string, " +  // e.g. 2023-07-11 14:14:14
                            "  edt string, " +
                            "  keyword string, " +
                            "  cur_date string, " +
                            "  keyword_count bigint " +
                            ")with(" +
                            " 'connector' = 'doris'," +
                            " 'fenodes' = 'hadoop102:7030'," +
                            "  'table.identifier' = 'gmall2023.dws_traffic_source_keyword_page_view_window'," +
                            "  'username' = 'root'," +
                            "  'password' = 'aaaaaa', " +
                            "  'sink.properties.format' = 'json', " +
                            "  'sink.buffer-count' = '4', " +
                            // NOTE(review): '4086' looks like a typo for '4096' — confirm intended buffer size
                            "  'sink.buffer-size' = '4086'," +
                            // Two-phase commit disabled to simplify testing; re-enable for exactly-once in production
                            "  'sink.enable-2pc' = 'false', " +
                            "  'sink.properties.read_json_by_line' = 'true' " +
                            ")");
        result.executeInsert("dws_traffic_source_keyword_page_view_window");
    }
}
/*
关键词粒度页面浏览各窗口汇总
数据源:
    日志: 页面日志
    
    搜索页面 => 搜索结果页面(目的)
    
    last_page_id = search  and item_type=keyword  and item is not null
    last_page_id = home  and item_type=keyword  and item is not null
    优化:
        (last_page_id = search || last_page_id = home) && item_type=keyword  && item is not null

----
关键词: 不同用户搜索习惯不一样. 需要把关键词分词之后再去开窗聚合
    分词器: 中文 IK 分词器
    
自定义 函数
    标量函数 scalar  1 1
    制表函数 Table   1 n
    聚合函数 Aggregate  n 1
    制表聚合 TableAggregate  n n
    
电脑 华为 黑色
                电脑
                华为
                黑色
手机苹果金色
                手机
                苹果
                金色


----------
开窗聚合:
    tvf:  滚动
-----------
0-5  电脑  10
0-5  手机  11
5-10 ...
...
 */