package com.atguigu.gmall.realtime.app.dws;

import com.atguigu.gmall.realtime.app.BaseSQLApp;
import com.atguigu.gmall.realtime.bean.KeywordBean;
import com.atguigu.gmall.realtime.common.Constant;
import com.atguigu.gmall.realtime.function.MySplit;
import com.atguigu.gmall.realtime.util.FlinkSinkUtil;
import com.atguigu.gmall.realtime.util.SQLUtil;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWS job: traffic search-keyword page-view window.
 *
 * <p>Pipeline: read page-view logs from the DWD Kafka topic, keep only the
 * search events, split each search phrase into single keywords with the
 * custom table function {@link MySplit}, count each keyword inside 5-second
 * tumbling event-time windows, and sink the windowed counts to the
 * ClickHouse table {@code dws_traffic_keyword_page_view_window}.
 *
 * @Author lzc
 * @Date 2022/12/10 08:44
 */
public class Dws_01_DwsTrafficKeywordPageViewWindow extends BaseSQLApp {
    public static void main(String[] args) {
        // init(port, parallelism, appId) is provided by BaseSQLApp; presumably it
        // sets up the environments/checkpointing and then calls handle() —
        // TODO confirm against BaseSQLApp.
        new Dws_01_DwsTrafficKeywordPageViewWindow().init(
            4001,
            2,
            "Dws_01_DwsTrafficKeywordPageViewWindow"
        );
    }
    
    @Override
    protected void handle(StreamExecutionEnvironment env,
                          StreamTableEnvironment tEnv) {
        // 1. Register the page-view log topic as a source table (DDL).
        //    et = event time derived from the epoch-ms field ts; the watermark
        //    tolerates 3 seconds of out-of-orderness. The connector options
        //    (Kafka topic + consumer group) come from SQLUtil.getKafkaSourceDDL.
        tEnv.executeSql("create table page(" +
                            "   page map<string, string>, " +
                            "   ts bigint, " +
                            "   et as to_timestamp_ltz(ts, 3), " +
                            "   watermark for et as et - interval '3' second " +
                            ")" + SQLUtil.getKafkaSourceDDL(Constant.TOPIC_DWD_TRAFFIC_PAGE, "Dws_01_DwsTrafficKeywordPageViewWindow"));
        
        // 2. Keep only search records and extract the raw search phrase.
        //    A search event is identified by: last_page_id is 'home' or 'search',
        //    item_type is 'keyword', and a non-null item (the phrase itself).
        Table keywordTable = tEnv.sqlQuery("select " +
                                               " page['item'] keyword, " +
                                               " et " +
                                               "from page " +
                                               "where ( page['last_page_id'] ='home' " +
                                               "     or page['last_page_id'] = 'search' ) " +
                                               "and page['item'] is not null " +
                                               "and page['item_type']='keyword'");
        tEnv.createTemporaryView("keyword_table", keywordTable);
        // 3. Tokenize each search phrase: one output row per word, via a
        //    lateral join against the user-defined table function my_split.
        tEnv.createTemporaryFunction("my_split", MySplit.class);
        Table kwTable = tEnv.sqlQuery("select " +
                                          " kw, " +
                                          " et  " +
                                          "from keyword_table " +
                                          "join lateral table(my_split(keyword)) as t(kw) on true");
        
        tEnv.createTemporaryView("kw_table", kwTable);
        // 4. Windowed aggregation with the TUMBLE table-valued function:
        //    count occurrences of each keyword per 5-second event-time window.
        Table resultTable = tEnv.sqlQuery("select " +
                                              " date_format(window_start, 'yyyy-MM-dd HH:mm:ss') stt, " +
                                              " date_format(window_end, 'yyyy-MM-dd HH:mm:ss') edt, " +
                                              " kw keyword, " +
                                              " count(*) keyword_count, " +
                                              " unix_timestamp()*1000 as ts " +   // emit timestamp (processing time, ms) — NOT the event time et
                                              "from table( tumble( table kw_table, descriptor(et), interval '5' second ) )" +
                                              "group by kw, window_start, window_end");
        
        // 5. Write the result to ClickHouse.
        // Convert the table to a retract stream first.
        SingleOutputStreamOperator<KeywordBean> stream = tEnv
            .toRetractStream(resultTable, KeywordBean.class)
            .filter(t -> t.f0)  // keep only inserts/updated-after rows; presumably the ClickHouse engine deduplicates updates on write — TODO confirm against FlinkSinkUtil/table engine
            .map(t -> t.f1);
    
        stream.addSink(FlinkSinkUtil.getClickHouseSink("dws_traffic_keyword_page_view_window", KeywordBean.class));
    
        
        try {
            env.execute();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        
    }
}
/*
自定义 sink 把数据写出到 clickhouse 中
  clickhouse 支持 jdbc 连接
 
 1. 直接通过 jdbc api, 向 clickhouse 写入
 
 2. 在 jdbc sink 的基础上, 封装一个 clickhouse sink




-------
找到搜索记录的特征:
    1. last_page_id ='search'  && item_type='keyword'  && item != null
    2. last_page_id ='home'  && item_type='keyword'  && item != null
    
---------------

"华为手机 白色 256g"
    华为
    手机
    白色
    256g
"华为手机  白色 256g"
    华为
    手机
    白色
    256g
"华为手机 256g"
    华为
    手机
    256g

  分词工具:
    ik分词器
    
  自定义函数: scalar  table  aggregate  TableAggregate
    
 */