package com.atguigu.gmall.realtime.app.dws;

import com.atguigu.gmall.realtime.bean.KeywordStats;
import com.atguigu.gmall.realtime.constant.GmallConstant;
import com.atguigu.gmall.realtime.function.KeywordAnalyzeFunction;
import com.atguigu.gmall.realtime.utils.ClickhouseUtils;
import com.atguigu.gmall.realtime.utils.KafkaUtils;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Keyword-topic statistics (implemented with Flink SQL).
 *
 * @author lvbingbing
 * @date 2022-07-06 15:29
 * @see KeywordStatsAppStreamApi for the equivalent Stream-API implementation
 */
@Slf4j
public class KeywordStatsAppSql {
    /**
     * Entry point: reads dwd page logs from Kafka, splits the search item into
     * keywords with a custom table function, aggregates keyword counts over
     * 10-second event-time tumbling windows, and sinks the result to ClickHouse.
     *
     * @param args unused CLI arguments
     * @throws Exception if job submission or execution fails
     */
    public static void main(String[] args) throws Exception {
        // 1. Basic environment setup
        // 1.1 Stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // 1.2 Parallelism (single task for this job)
        env.setParallelism(1);
        // 2. Checkpoint configuration
        // 2.1 Enable checkpointing every 6 s with exactly-once semantics
        env.enableCheckpointing(6000L, CheckpointingMode.EXACTLY_ONCE);
        // 2.2 Checkpoint timeout: abort any checkpoint that takes longer than 60 s
        CheckpointConfig checkpointConfig = env.getCheckpointConfig();
        checkpointConfig.setCheckpointTimeout(60000L);
        // 2.3 Externalized checkpoints: keep checkpoint data when the job is cancelled
        checkpointConfig.enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        // 2.4 Restart strategy: up to 3 attempts, 6 s apart
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 6000L));
        // 2.5 State backend (heap) with checkpoint storage on HDFS
        env.setStateBackend(new HashMapStateBackend());
        checkpointConfig.setCheckpointStorage("hdfs://hadoop102:8020/ck/gmall");
        // 2.6 HDFS user for checkpoint writes
        System.setProperty("HADOOP_USER_NAME", "atguigu");
        // 3. Table environment (Blink planner, streaming mode)
        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .useBlinkPlanner()
                .inStreamingMode()
                .build();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);
        // 4. Declare the Kafka source topic as a dynamic table.
        //    row_time is derived from the epoch-millisecond ts field and carries a
        //    3-second bounded-out-of-orderness watermark.
        String topic = "dwd_page_log";
        String groupId = "keyword_stats_app_group";
        tableEnv.executeSql("CREATE TABLE page_view (" +
                " common MAP<STRING,STRING>, " +
                " page MAP<STRING,STRING>, " +
                " ts BIGINT, " +
                " row_time AS TO_TIMESTAMP(FROM_UNIXTIME(ts / 1000, 'yyyy-MM-dd HH:mm:ss')), " +
                " WATERMARK FOR row_time AS row_time - INTERVAL '3' SECOND) " +
                " WITH (" + KafkaUtils.getConnectInfo(topic, groupId) + ")");
        // 5. Keep only search-result pages that actually carry a search phrase
        Table fullWordTable = tableEnv.sqlQuery("" +
                " select " +
                " page['item'] as full_word, " +
                " row_time " +
                " from page_view " +
                " where page['page_id'] = 'good_list' and page['item'] is not null");
        // 6. Register the IK-analyzer table function and explode each phrase into keywords
        tableEnv.createTemporarySystemFunction("ik_analyze", KeywordAnalyzeFunction.class);
        tableEnv.createTemporaryView("full_word_table", fullWordTable);
        Table keywordTable = tableEnv.sqlQuery("" +
                " select " +
                " keyword, " +
                " row_time " +
                " from full_word_table, " +
                " LATERAL TABLE(ik_analyze(full_word)) as T(keyword)");
        tableEnv.createTemporaryView("keyword_table", keywordTable);
        // 7. Windowed aggregation: keyword counts per 10-second tumbling window.
        // TODO(review): this window TVF aggregation produced no output during
        //  self-testing — possibly a Flink version-compatibility issue, or the
        //  event-time watermark never advancing on an idle/low-traffic source.
        //  Needs confirmation against the cluster's Flink version.
        Table sqlQuery = tableEnv.sqlQuery("" +
                " select " +
                " DATE_FORMAT(window_start, 'yyyy-MM-dd HH:mm:ss') as stt, " +
                " DATE_FORMAT(window_end,   'yyyy-MM-dd HH:mm:ss') as edt, " +
                " keyword, " +
                " count(*) as ct, " +
                " '" + GmallConstant.KEYWORD_SEARCH + "' as source, " +
                " UNIX_TIMESTAMP() * 1000 as ts " +
                " FROM TABLE(TUMBLE(TABLE keyword_table, DESCRIPTOR(row_time), INTERVAL '10' SECOND)) " +
                " group by window_start, window_end, keyword ");
        // 8. Convert the result table to an append-only stream and sink it to ClickHouse.
        //    The logging map is chained in front of the sink so the stream has a single
        //    branch (the original attached the sink to the pre-map stream, leaving the
        //    logging map as a redundant dangling branch processing every record twice).
        DataStream<KeywordStats> keywordStatsDs = tableEnv.toAppendStream(sqlQuery, KeywordStats.class);
        String executeSql = "insert into keyword_stats_2021(stt,edt,keyword,source,ct,ts) values(?,?,?,?,?,?)";
        keywordStatsDs
                .map(s -> {
                    log.info("关键词主题数据：{}, ", s);
                    return s;
                })
                .addSink(ClickhouseUtils.operateClickhouseJdbcSink(executeSql));
        // 9. Submit the job
        env.execute();
    }
}

























