package com.nepu.gmall.realtime.app.dws;

import com.nepu.gmall.realtime.app.func.KeywordUDTF;
import com.nepu.gmall.realtime.bean.KeywordBean;
import com.nepu.gmall.realtime.util.ClickHouseUtil;
import com.nepu.gmall.realtime.util.KafkaUtils;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Traffic domain: source-keyword-granularity page-view windowed summary table (FlinkSQL).
 * Pipeline:
 * (1) Read page-view detail records from the Kafka topic dwd_traffic_page_log
 * (2) Filter down to pages reached via a keyword search
 * (3) Split the search phrase into individual keywords with a custom UDTF
 * (4) Count occurrences of each keyword per tumbling window
 * (5) Write the windowed counts to ClickHouse
 *
 * Data flow:
 * mock --> log server --> f1.sh --> kafka --> BaseLogApp.class --> kafka --> DwsTrafficSourceKeywordPageViewWindow.class --> ClickHouse
 * @author chenshuaijun
 * @create 2023-02-27 21:55
 */
public class DwsTrafficSourceKeywordPageViewWindow {

    public static void main(String[] args) throws Exception {

        // TODO 1. Create the streaming execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 for local development; raise (and match Kafka partitions) in production.
        env.setParallelism(1);

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        // NOTE(review): production checkpoint / restart-strategy / state-backend config,
        // intentionally disabled for local runs — re-enable before deploying.
        /*env.enableCheckpointing(3000L, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(60 * 1000L);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
        env.getCheckpointConfig().enableExternalizedCheckpoints(
                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION
        );
        env.setRestartStrategy(RestartStrategies.failureRateRestart(
                3, Time.days(1), Time.minutes(1)
        ));
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage(
                "hdfs://hadoop102:8020/ck"
        );
        System.setProperty("HADOOP_USER_NAME", "atguigu");*/

        // TODO 2. Read from the dwd_traffic_page_log Kafka topic.
        // `rt` is an event-time column derived from the epoch-millis field `ts`;
        // the watermark allows events up to 2 seconds out of order.
        tableEnv.executeSql("" +
                "create table page_log( " +
                "    `page` map<string,string>, " +
                "    `ts` bigint, " +
                "    `rt` as TO_TIMESTAMP(FROM_UNIXTIME(ts/1000)), " +
                "    WATERMARK FOR rt AS rt - INTERVAL '2' SECOND " +
                ")"+KafkaUtils.getKafkaDDL("dwd_traffic_page_log","DwsTrafficSourceKeywordPageViewWindow"));

        // TODO 3. Keep only pages reached from a keyword search:
        // the previous page was the search page, the item type is 'keyword',
        // and the searched phrase is present.
        Table filterSearchPageTable = tableEnv.sqlQuery("" +
                "select " +
                "    `page`['item'] keyword, " +
                "    rt " +
                "from page_log " +
                "where `page`['last_page_id']='search' and `page`['item_type']='keyword' and page['item'] is not null");

        tableEnv.createTemporaryView("keyword_search", filterSearchPageTable);

        // TODO 4. Split each search phrase into individual keywords.
        // Register the table function (UDTF) and expand one row per word
        // via a LATERAL TABLE cross join.
        tableEnv.createTemporarySystemFunction("SplitFunction", KeywordUDTF.class);
        Table splitTable = tableEnv.sqlQuery("" +
                "SELECT  " +
                "    word,  " +
                "    rt " +
                "FROM keyword_search, LATERAL TABLE(SplitFunction(keyword))");

        tableEnv.createTemporaryView("split_result",splitTable);

        // TODO 5. Count keyword occurrences per 10-second tumbling event-time window.
        // `ts` here is the processing wall-clock time (millis) used as a version field downstream.
        Table resultTable = tableEnv.sqlQuery("" +
                "select " +
                "    DATE_FORMAT(TUMBLE_START(rt, INTERVAL '10' SECOND),'yyyy-MM-dd HH:mm:ss') stt, " +
                "    DATE_FORMAT(TUMBLE_END(rt, INTERVAL '10' SECOND),'yyyy-MM-dd HH:mm:ss') edt, " +
                "    'search' source, " +
                "    word keyword, " +
                "    count(*) keyword_count, " +
                "    UNIX_TIMESTAMP()*1000 ts " +
                "from split_result " +
                "GROUP BY TUMBLE(rt, INTERVAL '10' second),word");

        tableEnv.createTemporaryView("result",resultTable);

        // The windowed aggregation emits one final row per (window, word), so an
        // append-only stream conversion is sufficient here.
        DataStream<KeywordBean> keywordBeanDataStream = tableEnv.toAppendStream(resultTable, KeywordBean.class);

        // TODO 6. Sink the results to ClickHouse (column order must match the table DDL).
        keywordBeanDataStream.addSink(ClickHouseUtil.getJdbcSink("insert into dws_traffic_source_keyword_page_view_window values(?,?,?,?,?,?)"));

        // Named job for easier identification in the Flink Web UI.
        env.execute("DwsTrafficSourceKeywordPageViewWindow");
    }
}
