package com.atguigu.app.dws;

import com.atguigu.app.func.DwsSplitFunction;
import com.atguigu.common.Constant;
import com.atguigu.util.KafkaUtil;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

//数据流:web/app -> nginx -> 日志服务器(log文件) -> Flume -> Kafka(ODS) -> FlinkApp -> Kafka(DWD) -> FlinkApp -> Doris
//程  序:Mock -> Flume(f1.sh) -> Kafka(ZK) -> Dwd01_TrafficBaseLogSplit -> Kafka(ZK) -> Dws01_TrafficSourceKeywordPageViewWindow -> Doris(DWS)
public class Dws01_TrafficSourceKeywordPageViewWindow {

    /**
     * DWS job: reads DWD page-view logs from Kafka with FlinkSQL, filters search
     * events, splits the search keyword with a UDTF, aggregates counts per keyword
     * over 10-second tumbling event-time windows, and writes the result to Doris.
     */
    public static void main(String[] args) throws Exception {

        //TODO 1. Create the execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // In production, keep the parallelism consistent with the Kafka topic's partition count.
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        //1.1 Enable checkpointing (every 5s) with a heap-based state backend
        env.enableCheckpointing(5000L);
        env.setStateBackend(new HashMapStateBackend());

        //1.2 Additional checkpoint settings — enable these for production deployments
        //CheckpointConfig checkpointConfig = env.getCheckpointConfig();
        //checkpointConfig.setCheckpointTimeout(10000L);
        //checkpointConfig.setCheckpointStorage("hdfs://hadoop102:8020/flink-ck");
        //Retain the last checkpoint when the job is cancelled
        //checkpointConfig.setExternalizedCheckpointCleanup(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        //checkpointConfig.setMinPauseBetweenCheckpoints(5000L);
        //Restart strategy
        //env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 1000L));

        //TODO 2. Create a table over the Kafka DWD page-log topic with FlinkSQL.
        // `rt` is a computed event-time column derived from the epoch-millis `ts`,
        // with a 2-second out-of-orderness watermark.
        tableEnv.executeSql("" +
                "create table page_log(\n" +
                "    `page` map<string,string>,\n" +
                "    `ts` bigint,\n" +
                "    `rt` as TO_TIMESTAMP_LTZ(`ts`,3),\n" +
                "    WATERMARK FOR `rt` AS `rt` - INTERVAL '2' SECOND\n" +
                ")" + KafkaUtil.getKafkaSourceDDL(Constant.TOPIC_DWD_TRAFFIC_PAGE, "keyword_page_view_230524"));

        //TODO 3. Keep only keyword-search records: the user arrived from the search
        // page and the page item carries a non-null keyword.
        Table filterTable = tableEnv.sqlQuery("" +
                "select\n" +
                "    `page`['item'] item,\n" +
                "    `rt`\n" +
                "from page_log\n" +
                "where `page`['last_page_id'] = 'search'\n" +
                "and `page`['item'] is not null\n" +
                "and `page`['item_type'] = 'keyword'");
        tableEnv.createTemporaryView("filter_table", filterTable);

        //TODO 4. Register the word-split UDTF and explode each search phrase into
        // one row per word (cross join with the lateral table).
        tableEnv.createTemporarySystemFunction("split_function", DwsSplitFunction.class);
        Table splitTable = tableEnv.sqlQuery("" +
                "SELECT \n" +
                "    rt, \n" +
                "    word\n" +
                "FROM filter_table, LATERAL TABLE(split_function(item))");
        tableEnv.createTemporaryView("split_table", splitTable);

        //TODO 5. Count occurrences per keyword in 10-second tumbling event-time windows.
        Table resultTable = tableEnv.sqlQuery("" +
                "SELECT \n" +
                "    DATE_FORMAT(window_start,'yyyy-MM-dd HH:mm:ss') stt, \n" +
                "    DATE_FORMAT(window_end,'yyyy-MM-dd HH:mm:ss') edt,\n" +
                "    word keyword,\n" +
                "    DATE_FORMAT(window_start,'yyyy-MM-dd') cur_date, \n" +
                "    count(*)  keyword_count\n" +
                "FROM TABLE(\n" +
                "   TUMBLE(\n" +
                "     DATA => TABLE split_table,\n" +
                "     TIMECOL => DESCRIPTOR(rt),\n" +
                "     SIZE => INTERVAL '10' SECOND))\n" +
                "group by window_start, window_end,word");

        //TODO 6. Define the Doris sink table and submit the INSERT.
        // NOTE(review): 'sink.buffer-size' = '4086' looks like a typo for 4096 —
        // confirm against the Doris connector config before changing.
        tableEnv.executeSql("CREATE table doris_t(  " +
                " stt string, " +
                " edt string, " +
                " keyword string, " +
                " cur_date string, " +
                " keyword_count bigint " +
                ")WITH (" +
                "  'connector' = 'doris', " +
                "  'fenodes' = 'hadoop102:7030', " +
                " 'table.identifier' = 'gmall_230524.dws_traffic_source_keyword_page_view_window', " +
                "  'username' = 'root', " +
                "  'password' = '000000', " +
                "  'sink.properties.format' = 'json', " +
                "  'sink.properties.read_json_by_line' = 'true', " +
                "  'sink.buffer-count' = '4', " +
                "  'sink.buffer-size' = '4086'," +
                "  'sink.enable-2pc' = 'false' " + // two-phase commit disabled to simplify testing
                ")");

        // insertInto(...).execute() submits the Table API job itself; do NOT call
        // env.execute() afterwards — the whole pipeline is pure Table API, so the
        // StreamExecutionEnvironment holds no DataStream operators and env.execute()
        // would fail with "No operators defined in streaming topology".
        resultTable.insertInto("doris_t")
                .execute()
                .print();
    }
}
