package com.atguigu.realtime.app.dws;

import com.atguigu.realtime.app.BaseSQLApp;
import com.atguigu.realtime.bean.KeywordBean;
import com.atguigu.realtime.common.Constant;
import com.atguigu.realtime.function.IkAnalyzer;
import com.atguigu.realtime.util.FlinkSinkUtil;
import com.atguigu.realtime.util.SQLUtil;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author lzc
 * @Date 2022/7/24 14:21
 */
/**
 * DWS job: counts search keywords per 5-second tumbling window.
 *
 * <p>Pipeline: reads DWD page-view logs from Kafka, filters search events,
 * splits the search phrase into keywords with a custom IK-analyzer table
 * function, aggregates counts per window via a windowing TVF, and writes the
 * result beans to ClickHouse table
 * {@code dws_traffic_source_keyword_page_view_window}.
 */
public class Dws_01_DwsTrafficSourceKeywordPageViewWindow extends BaseSQLApp {
    public static void main(String[] args) {
        // init(port, parallelism, ckAndGroupId): starts the app with a web UI
        // port of 3001, parallelism 2, and this job name as the checkpoint
        // path / Kafka consumer-group id.
        new Dws_01_DwsTrafficSourceKeywordPageViewWindow().init(
            3001,
            2,
            "Dws_01_DwsTrafficSourceKeywordPageViewWindow"
        );
    }

    @Override
    protected void handle(StreamExecutionEnvironment env,
                          StreamTableEnvironment tEnv) {
        // 1. Source table over the DWD traffic page-log Kafka topic.
        //    et is an event-time attribute derived from the epoch-millis ts,
        //    with a 3-second bounded-out-of-orderness watermark.
        tEnv.executeSql("create table page_log(" +
                            " page map<string, string>, " +
                            " ts bigint, " +
                            " et as to_timestamp_ltz(ts, 3), " +
                            " watermark for et as et - interval '3' second" +
                            ")" + SQLUtil.getKafkaSourceDDL(Constant.TOPIC_DWD_TRAFFIC_PAGE, "Dws_01_DwsTrafficSourceKeywordPageViewWindow"));
        //        tEnv.sqlQuery("select * from page_log").execute().print();

        // 2. Keep only search records (entered from the search or home page
        //    with item_type='keyword') and expose the raw search phrase as kw.
        Table kwTable = tEnv.sqlQuery("select " +
                                          " page['item'] kw, " +
                                          " et " +
                                          "from page_log " +
                                          "where (page['last_page_id']='search' or page['last_page_id']='home') " +
                                          "and page['item_type']='keyword' " +
                                          "and page['item'] is not null");
        tEnv.createTemporaryView("kw_table", kwTable);

        // 3. Tokenize each search phrase into individual keywords.
        // 3.1 Register the custom IK-analyzer table function.
        tEnv.createTemporaryFunction("ik_analyzer", IkAnalyzer.class);
        // 3.2 Cross-join each row with the keywords produced from its kw;
        //     "on true" keeps every (row, keyword) pair.
        Table keywordTable = tEnv.sqlQuery("select " +
                                               " keyword, " +
                                               " et " +
                                               "from kw_table " +
                                               "join lateral table(ik_analyzer(kw)) on true");
        tEnv.createTemporaryView("keyword_table", keywordTable);
        //        keywordTable.execute().print();

        // 4. Count keywords per 5-second tumbling event-time window using the
        //    windowing TVF (tumble). Window choices here: group windows
        //    (tumble/slide/session) vs. TVF windows (tumble/slide/cumulate).
        Table result = tEnv.sqlQuery("select " +
                                         " date_format(window_start, 'yyyy-MM-dd HH:mm:ss') stt, " +
                                         " date_format(window_end, 'yyyy-MM-dd HH:mm:ss') edt, " +
                                         " 'search' source, " +  // constant: marks the keyword origin
                                         " keyword, " +
                                         " count(*) keyword_count, " +
                                         " unix_timestamp() * 1000 ts " + // statistic timestamp (processing time, ms)
                                         "from table( tumble( table keyword_table, descriptor(et), interval '5' second ) ) " +
                                         "group by window_start,window_end,  keyword");


        // 5. Write the result to ClickHouse via a custom JDBC-based sink.
        //    Convert the table to a retract stream and drop retraction
        //    messages (f0 == false) so only insert/update-after rows remain.
        SingleOutputStreamOperator<KeywordBean> beanStream = tEnv
            .toRetractStream(result, KeywordBean.class)
            .filter(t -> t.f0)
            .map(t -> t.f1);
        // The sink is built generically (table name + bean class); remaining
        // column metadata is derived from the class via reflection.
        beanStream.addSink(FlinkSinkUtil.getClickHouseSink("dws_traffic_source_keyword_page_view_window", KeywordBean.class));

        try {
            env.execute();
        } catch (Exception e) {
            // Propagate instead of swallowing: a failed job submission must not
            // look like a successful run, and printStackTrace() bypasses logging.
            throw new RuntimeException("Flink job execution failed: Dws_01_DwsTrafficSourceKeywordPageViewWindow", e);
        }
    }
}
