package com.fourth.app.dws;

import com.fourth.app.func.splitFunction;
import com.fourth.bean.KeywordBean;
import com.fourth.utils.MyClickHouseUtil;
import com.fourth.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @author wjy
 * @create 2022-08-20 9:22
 */
/**
 * DWS-layer job: traffic source keyword page-view window.
 *
 * <p>Reads page-view logs from the DWD Kafka topic {@code dwd_traffic_page_log},
 * keeps only keyword-search events, splits the searched text into individual
 * keywords with a UDTF, counts each keyword in 10-second event-time tumbling
 * windows, and writes the aggregates to the ClickHouse table
 * {@code dws_traffic_source_keyword_page_view_window}.
 *
 * @author wjy
 * @create 2022-08-20 9:22
 */
public class DwsTrafficSourceKeywordPageViewWindow {
    public static void main(String[] args) throws Exception {
        // 1. Set up the streaming environment and its Table API bridge.
        //    Parallelism 1 keeps ordering simple for this single-topic job.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // 2. Declare a dynamic table over the DWD page-log topic. Event time
        //    `rt` is derived from the epoch-millis `ts` column, with watermarks
        //    trailing the event time by 2 seconds to tolerate late records.
        String topic = "dwd_traffic_page_log";
        String groupId = "source_keyword_page";
        tableEnv.executeSql("" +
                "create table page_view( " +
                "    `page` map<string,string>, " +
                "    `ts` bigint, " +
                "    rt AS TO_TIMESTAMP_LTZ(ts,3), " +
                "  WATERMARK FOR rt AS rt - INTERVAL '2' SECOND " +
                ")" + MyKafkaUtil.getKafkaDDL(topic, groupId));

        // 3. Keep only search-behavior records: a page view whose previous
        //    page was the search page and whose item carries a keyword.
        Table filterTable = tableEnv.sqlQuery("" +
                "select " +
                "   `page`['item'] full_word, " +
                "    rt " +
                "from page_view " +
                "where `page`['item'] is not null " +
                "and `page`['last_page_id'] = 'search' " +
                "and `page`['item_type'] = 'keyword'");
        tableEnv.createTemporaryView("filter_table", filterTable);

        // 4. Register the word-splitting UDTF (one search phrase -> N keywords).
        tableEnv.createTemporaryFunction("sptFuc", splitFunction.class);

        // 5. Explode each full search phrase into individual keyword rows,
        //    preserving the event-time attribute for windowing.
        Table splitTable = tableEnv.sqlQuery("" +
                "select " +
                "    word, " +
                "    rt " +
                "from filter_table, lateral table(sptFuc(full_word))");
        tableEnv.createTemporaryView("split_table", splitTable);

        // 6. Count each keyword per 10-second tumbling event-time window.
        //    `source` is a constant tag ('search') identifying the traffic
        //    source; `ts` is the processing-time stamp of the aggregation.
        Table resultTable = tableEnv.sqlQuery("" +
                "select " +
                "    'search' source, " +
                "    date_format(TUMBLE_START(rt, INTERVAL '10' SECOND),'yyyy-MM-dd HH:mm:ss') stt, " +
                "    date_format(TUMBLE_END(rt, INTERVAL '10' SECOND),'yyyy-MM-dd HH:mm:ss') edt, " +
                "    word keyword, " +
                "    count(*) keyword_count, " +
                "    UNIX_TIMESTAMP() ts " +
                "from split_table " +
                "group by word, " +
                "TUMBLE(rt, INTERVAL '10' SECOND)");

        // 7. Convert the windowed result into an append-only stream of POJOs.
        //    toDataStream replaces the deprecated toAppendStream (both exist
        //    since Flink 1.13, the version that introduced TO_TIMESTAMP_LTZ
        //    used above). NOTE(review): both APIs map POJO fields by name —
        //    confirm KeywordBean's field names match the query's aliases.
        DataStream<KeywordBean> keywordDStream = tableEnv.toDataStream(resultTable, KeywordBean.class);
        keywordDStream.print(">>>>>>>>>>");

        // 8. Sink the aggregates to ClickHouse; the six placeholders match the
        //    six columns selected in step 6.
        keywordDStream.addSink(MyClickHouseUtil.getSink("insert into dws_traffic_source_keyword_page_view_window values(?,?,?,?,?,?)"));

        // 9. Submit the job.
        env.execute("DwsTrafficSourceKeywordPageViewWindow");
    }
}
