package com.asiainfo.dacp.common.datasource;

import com.asiainfo.dacp.common.desr.KafkaDeserialize;
import org.apache.flink.core.fs.Path;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.Serializable;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;

/**
 * 用于读取Kafka数据源，后续有其他数据源也可以直接扩展即可
 */
/**
 * Builds the streaming data source for a Flink job. Currently only Kafka is
 * supported; additional source types (e.g. DDS) can be added inside
 * {@link #getDataSource}.
 */
public abstract class DataBaseSource implements Serializable {

    final static Logger logger = LoggerFactory.getLogger(DataBaseSource.class);

    /**
     * Entry point: initializes the execution environment (parallelism and,
     * optionally, checkpointing) from the given configuration, then builds
     * and returns the source stream.
     *
     * @param env        Flink stream execution environment
     * @param properties job configuration
     * @return the raw message stream read from the configured source
     */
    public static DataStreamSource<String> run(StreamExecutionEnvironment env, Properties properties) {
        init(env, properties); // configure the execution context first
        return getDataSource(env, properties);
    }

    /**
     * Builds the stream for the configured source type.
     *
     * @param env        Flink stream execution environment
     * @param properties job configuration; {@code datasource.type} selects the
     *                   source (defaults to {@code kafka})
     * @return the Kafka-backed stream; on an unsupported source type the JVM
     *         exits (legacy behavior, deliberately kept)
     */
    private static DataStreamSource<String> getDataSource(StreamExecutionEnvironment env,
                                                  Properties properties) {
        String dataSourceType = properties.getProperty("datasource.type", "kafka");
        if (!"kafka".equals(dataSourceType)) {
            // NOTE(review): System.exit() in library code is harsh, but kept
            // for backward compatibility with existing deployments.
            logger.error("dataSourceType:{} 数据源类型不匹配,程序退出", dataSourceType);
            System.exit(-1);
        }

        Properties kafkaPro = new Properties();
        kafkaPro.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,
                requiredProperty(properties, "kafka.source.servers"));
        kafkaPro.setProperty(ConsumerConfig.GROUP_ID_CONFIG,
                requiredProperty(properties, "kafka.source.consumer"));
        kafkaPro.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");
        kafkaPro.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");

        // Optional SASL/PLAIN authentication. The correctly spelled keys are
        // preferred; the historical misspellings ("kakfa.*") are still honored
        // so existing configuration files keep working.
        String isAuth = propertyWithLegacyFallback(properties, "kafka.isAuth", "kakfa.isAuth", "N");
        if ("Y".equals(isAuth)) {
            kafkaPro.setProperty("security.protocol", requiredProperty(properties, "security.protocol"));
            kafkaPro.setProperty("sasl.mechanism", "PLAIN");
            String username = propertyWithLegacyFallback(properties,
                    "kafka.source.username", "kakfa.source.username", null);
            String password = properties.getProperty("kafka.source.password");
            kafkaPro.put("sasl.jaas.config",
                    "org.apache.kafka.common.security.plain.PlainLoginModule required username=\""
                            + username + "\" password=\"" + password + "\";");
        }

        // A comma-separated list lets one consumer read several topics.
        String topics = requiredProperty(properties, "kafka.source.topics");
        List<String> topicList = Arrays.asList(topics.split(","));

        FlinkKafkaConsumer<String> kafkaConsumer =
                new FlinkKafkaConsumer<>(topicList, new KafkaDeserialize(), kafkaPro);

        // Start position: committed group offsets, earliest, or latest (default).
        switch (properties.getProperty("kafka.consumer.mode", "latest")) {
            case "groupoffset":
                kafkaConsumer.setStartFromGroupOffsets();
                break;
            case "earliest":
                kafkaConsumer.setStartFromEarliest();
                break;
            default:
                kafkaConsumer.setStartFromLatest();
                break;
        }
        return env.addSource(kafkaConsumer);
    }

    /**
     * Configures the execution environment: default parallelism and, when
     * {@code flink.execute.ischeckpoint=Y}, exactly-once checkpointing.
     *
     * @param env        Flink stream execution environment
     * @param properties job configuration
     */
    private static void init(StreamExecutionEnvironment env, Properties properties) {
        // Default source parallelism.
        env.setParallelism(Integer.parseInt(properties.getProperty("flink.source.parallelism", "1")));

        if (!"Y".equals(properties.getProperty("flink.execute.ischeckpoint", "N"))) {
            return;
        }

        // Checkpoint interval in milliseconds.
        env.enableCheckpointing(Long.parseLong(
                properties.getProperty("flink.execute.checkpoint.interval", "30000")));

        CheckpointConfig checkpointConfig = env.getCheckpointConfig();

        // Exactly-once checkpointing semantics.
        checkpointConfig.setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);

        // Number of tolerated checkpoint failures before the job fails.
        // BUGFIX: the original code also called the deprecated
        // setFailOnCheckpointingErrors(true) afterwards, which resets this
        // tolerance back to 0 and made "flink.execute.checkpoint.retry" a
        // no-op; that call has been removed so the setting takes effect.
        checkpointConfig.setTolerableCheckpointFailureNumber(Integer.parseInt(
                properties.getProperty("flink.execute.checkpoint.retry", "2")));

        // Checkpoint timeout in milliseconds.
        checkpointConfig.setCheckpointTimeout(Long.parseLong(
                properties.getProperty("flink.execute.checkpoint.timeout", "30000")));

        // Checkpoint storage location.
        String checkpointPath = requiredProperty(properties, "flink.execute.checkpoint.path");
        checkpointConfig.setCheckpointStorage(new Path(checkpointPath));

        // Retain checkpoints when the job is cancelled manually.
        checkpointConfig.enableExternalizedCheckpoints(
                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

        // State backend pointing at the same path. NOTE(review): on newer
        // Flink versions setCheckpointStorage above already covers the
        // storage location; kept for compatibility with the original setup.
        env.setStateBackend(new FsStateBackend(checkpointPath, false));
    }

    /**
     * Returns the value of a mandatory configuration key, failing fast with a
     * descriptive message (instead of a bare NPE downstream) when missing.
     *
     * @throws IllegalArgumentException if the key is absent
     */
    private static String requiredProperty(Properties properties, String key) {
        String value = properties.getProperty(key);
        if (value == null) {
            throw new IllegalArgumentException("Missing required property: " + key);
        }
        return value;
    }

    /**
     * Returns the value of {@code key}, falling back to {@code legacyKey}
     * (a historical misspelling kept for backward compatibility) and finally
     * to {@code defaultValue}.
     */
    private static String propertyWithLegacyFallback(Properties properties, String key,
                                                     String legacyKey, String defaultValue) {
        String value = properties.getProperty(key);
        if (value == null) {
            value = properties.getProperty(legacyKey);
        }
        return value != null ? value : defaultValue;
    }

}

