package com.atguigu.gmallrealtime.app.dws;

import com.atguigu.gmallrealtime.app.func.KeywordUDTF;
import com.atguigu.gmallrealtime.common.Constant;
import com.atguigu.gmallrealtime.util.DorisUtil;
import com.atguigu.gmallrealtime.util.MyKafkaUtil;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;

/**
 * DWS job: counts search keywords per 10-second tumbling window.
 *
 * <p>Pipeline: reads the DWD traffic page-log topic from Kafka, filters rows that
 * represent a keyword search (item present, last_page_id = 'search',
 * item_type = 'keyword'), splits each keyword into words with the IK-analyzer
 * UDTF, aggregates word counts in event-time tumbling windows, and writes the
 * result to Doris.
 *
 * @author yhm
 * @create 2023-10-08 15:19
 */
public class DwsTrafficSourceKeywordPageViewWindow {
    public static void main(String[] args) {
        // TODO 1 Create the Flink execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TODO 2 Configure checkpointing and the state backend
        env.enableCheckpointing(3000L, CheckpointingMode.EXACTLY_ONCE);

        // 2.2 Checkpoint timeout
        env.getCheckpointConfig().setCheckpointTimeout(60000L);
        // 2.3 Retain externalized checkpoints after job cancellation
        env.getCheckpointConfig().setExternalizedCheckpointCleanup(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        // 2.4 Minimum pause between two consecutive checkpoints
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000L);
        // 2.5 Restart strategy: at most 3 failures within 30 days, 3s delay between restarts
        // env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3,3000L));
        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(30), Time.seconds(3)));

        // Set the HDFS user BEFORE configuring HDFS-backed checkpoint storage,
        // so the HDFS client picks it up from the first access.
        System.setProperty("HADOOP_USER_NAME", "atguigu");

        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");

        // TODO 3 Read the dwd_traffic_page topic.
        // row_time is derived from the epoch-millis ts column; 3s of tolerated lateness.
        String groupId = "dws_traffic_source_keyword_page_view_window";
        tableEnv.executeSql("create table page_log(\n" +
                "    `common` map<STRING,STRING>,\n" +
                "    `page` map<STRING,STRING>,\n" +
                "    `ts` bigint,\n" +
                "    row_time AS TO_TIMESTAMP(FROM_UNIXTIME(ts/1000, 'yyyy-MM-dd HH:mm:ss')), \n" +
                "    WATERMARK FOR row_time AS row_time - INTERVAL '3' SECOND \n" +
                ")"
                + MyKafkaUtil.getKafkaDDL(Constant.TOPIC_DWD_TRAFFIC_PAGE, groupId));

        // TODO 4 Keep only keyword-search rows (item present, came from the
        // search page, and the item is of type 'keyword').
        Table keywordTable = tableEnv.sqlQuery("select \n" +
                "  page['item'] keyword,\n" +
                "  row_time\n" +
                "from page_log\n" +
                "where page['item'] is not null \n" +
                "and page['last_page_id']='search'\n" +
                "and page['item_type']='keyword'");

        tableEnv.createTemporaryView("keyword_table", keywordTable);

        // TODO 5 Register the IK word-splitting UDTF
        tableEnv.createTemporarySystemFunction("iKSplitFunction", KeywordUDTF.class);


        // TODO 6 Split each keyword into individual words via the UDTF
        Table wordTable = tableEnv.sqlQuery("SELECT keyword, " +
                "   word, \n " +
                "  row_time\n" +
                "FROM keyword_table, LATERAL TABLE(iKSplitFunction(keyword))");
        tableEnv.createTemporaryView("word_table", wordTable);

        // TODO 7 Count words per 10-second tumbling event-time window.
        // cur_date is derived from the window start (was a hard-coded literal).
        Table resultTable = tableEnv.sqlQuery("SELECT\n" +
                "  TUMBLE_START(row_time, INTERVAL '10' second) AS stt,\n" +
                "  TUMBLE_END(row_time, INTERVAL '10' second) AS edt,\n" +
                "  word keyword,\n" +
                "  DATE_FORMAT(TUMBLE_START(row_time, INTERVAL '10' second), 'yyyy-MM-dd') cur_date,\n" +
                "  count(*) keyword_count \n" +
                "FROM word_table\n" +
                "GROUP BY\n" +
                "  TUMBLE(row_time, INTERVAL '10' second),\n" +
                "  word");


        // TODO 8 Declare the Doris sink table mapping
        tableEnv.executeSql("create table doris_sink(\n" +
                "    `stt` TIMESTAMP(3),\n" +
                "    `edt` TIMESTAMP(3),\n" +
                "    `keyword` STRING,\n" +
                "    `cur_date` STRING,\n" +
                "    keyword_count bigint\n" +
                ")"
                + DorisUtil.getDorisSinkDDL("dws_traffic_source_keyword_page_view_window"));

        // TODO 9 Write the aggregated result to Doris (submits the job)
        resultTable.insertInto("doris_sink").execute();

    }
}
