package com.atguigu.realtime.app.dws;

import com.atguigu.realtime.app.BaseSQLApp;
import com.atguigu.realtime.bean.KeywordBean;
import com.atguigu.realtime.common.Constant;
import com.atguigu.realtime.function.KWSplit;
import com.atguigu.realtime.util.FlinkSinkUtil;
import com.atguigu.realtime.util.SQLUtil;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @author yuzehua
 * @date 2022/10/27 - 13:44
 */
public class Dws_TrafficKeywordPageViewWindow extends BaseSQLApp {

    public static void main(String[] args) {
        new Dws_TrafficKeywordPageViewWindow().init(
                2050,
                2,
                "Dws_TrafficSourceKeywordPageViewWindow"
        );
    }

    @Override
    protected void handle(StreamExecutionEnvironment env, StreamTableEnvironment tEnv) {
        // 1. Declare a dynamic table over the dwd page-log Kafka topic.
        //    'et' is the event-time attribute (must be timestamp(3)); watermark lags 3s.
        String pageLogDdl = "create table dwd_traffic_page(" +
                "page map<String, String>, " +
                "ts bigint, " +
                "et as to_timestamp_ltz(ts, 3), " +
                "watermark for et as et - interval '3' second " +
                ")" + SQLUtil.getKafkaSource(Constant.TOPIC_DWD_TRAFFIC_PAGE, "Dws_01_DwsTrafficSourceKeywordPageViewWindow");
        tEnv.executeSql(pageLogDdl);

        // 2. Keep only records with page_id = course_list and extract the course keyword.
        String keywordSql = "select " +
                " page['item'] keyword, " +
                " et " +
                " from dwd_traffic_page " +
                " where (page['page_id']='course_list')" +
                " and page['item_type']='keyword' " +
                " and page['item'] is not null ";
        tEnv.createTemporaryView("kw_table", tEnv.sqlQuery(keywordSql));

        // 3. Keyword tokenization. (Skipped for now: each keyword is a single word today;
        //    kept here for the future case of cross-discipline courses carrying several keywords.)
        /*tEnv.createTemporaryFunction("kw_split", KWSplit.class);
        Table keywordTable = tEnv.sqlQuery("select " +
                "keyword, " +
                "et " +
                "from kw_table " +
                "join lateral table(kw_split(kw)) on true");
        tEnv.createTemporaryView("keyword_table", keywordTable);
        tEnv.sqlQuery("select * from keyword_table").execute().print();*/

        // 4. Count occurrences of each keyword per 5-second tumbling window (group + window agg).
        String windowAggSql = "select " +
                " date_format(window_start, 'yyyy-MM-dd HH:mm:ss') stt, " +
                " date_format(window_end, 'yyyy-MM-dd HH:mm:ss') edt, " +
                " keyword, " +
                " count(keyword) keyword_count, " +
                " unix_timestamp() * 1000 ts " +
                "from table( tumble( table kw_table, descriptor(et), interval '5' second ) ) " +
                "group by keyword, window_start, window_end";
        Table windowAgg = tEnv.sqlQuery(windowAggSql);

        // 5. Bridge to a retract stream, keep only the insert (f0 == true) side,
        //    and write the beans to ClickHouse.
        DataStream<Tuple2<Boolean, KeywordBean>> retractStream =
                tEnv.toRetractStream(windowAgg, KeywordBean.class);
        retractStream
                .filter(t -> t.f0)
                .map(t -> t.f1)
                .addSink(FlinkSinkUtil.getClickHouseSink("dws_traffic_keyword_page_view_window", KeywordBean.class));

        try {
            env.execute();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
