package com.flink;


import com.flink.unit.Constant;
import com.flink.unit.LogInfoStreamUnit;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.*;


/**
 * Entry point for a Flink streaming job that consumes log records from Kafka,
 * filters/parses them via {@link com.flink.unit.LogInfoStreamUnit}, and sinks
 * the results into Elasticsearch.
 *
 * <p>Checkpointing is enabled with exactly-once semantics and externalized
 * checkpoints retained on cancellation, so the job can be restored manually
 * after a cancel.
 */
public class KafkaSinkES {
    /** Logger for this class (SLF4J convention: private static final). */
    private static final Logger logger = LoggerFactory.getLogger(KafkaSinkES.class);

    /**
     * Builds the execution environment, wires the Kafka source stream(s),
     * and submits the job to the cluster.
     *
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {
        logger.info("开始执行Flink拉取数据过滤后存入到ES库中.....");
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Trigger a checkpoint every 10 seconds.
        env.enableCheckpointing(1000 * 10);
        // Exactly-once processing guarantees.
        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        // Require at least 500 ms between the end of one checkpoint and the start of the next.
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(500);
        // Abort a checkpoint if it does not complete within 60 seconds.
        env.getCheckpointConfig().setCheckpointTimeout(1000 * 60);
        // Only one checkpoint may be in flight at a time.
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(1);
        // Keep externalized checkpoints when the job is cancelled (manual cleanup/restore).
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

        Constant constant = new Constant();
        // Kafka consumer configuration; connection/offset settings come from Constant.
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, constant.brokers);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, constant.commit);
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, constant.reset);
        logger.debug("开始消费kafka数据");
        LogInfoStreamUnit.startStream(env, constant, constant.topic1, constant.table1, constant.kafka_group1, props);
        // Additional topics, currently disabled:
//        LogInfoStreamUnit.startStream(env, constant, constant.topic2, constant.table2, constant.kafka_group2, props);
//        LogInfoStreamUnit.startStream(env, constant, constant.topic3, constant.table3, constant.kafka_group3, props);
        try {
            env.execute("Flink解析日志");
        } catch (Exception e) {
            // Log the full exception (message + stack trace) through the logging
            // framework instead of printStackTrace() + message-only logging,
            // which discarded the stack trace from the log output.
            logger.error("Flink job execution failed", e);
        }
    }
}
