package com.share.bigdata.flink.countword.conf;

import com.share.bigdata.common.constants.ConfConstant;
import com.share.bigdata.common.init.Env;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisPoolConfig;

import java.util.Arrays;
import java.util.List;
import java.util.Properties;

public final class CommonConf {

    /** Utility class — not meant to be instantiated. */
    private CommonConf() {
    }

    /**
     * Builds the Redis connection config for the Flink Redis sink.
     * <p>
     * Flink offers three Jedis config flavors:
     * <ol>
     *   <li>{@code FlinkJedisPoolConfig} — single standalone instance (used here)</li>
     *   <li>{@code FlinkJedisSentinelConfig} — sentinel setup</li>
     *   <li>{@code FlinkJedisClusterConfig} — cluster setup</li>
     * </ol>
     * Host is taken from {@link Env#getRedis()}; the password is only applied
     * when non-blank, so an unauthenticated Redis keeps working.
     *
     * @return a ready-to-use {@link FlinkJedisPoolConfig}
     */
    public static FlinkJedisPoolConfig getFlinkJedisPoolConfig() {
        FlinkJedisPoolConfig.Builder builder = new FlinkJedisPoolConfig.Builder()
                .setHost(Env.getRedis().getHost());
        // Only set a password when one is configured; blank means no AUTH.
        if (StringUtils.isNotBlank(Env.getRedis().getPwd())) {
            builder.setPassword(Env.getRedis().getPwd());
        }
        return builder.build();
    }

    /**
     * Builds the Kafka consumer source for topic
     * {@link ConfConstant#TOPIC_RISK_RESULT_INFO}, reading records as plain
     * strings and starting from the latest offsets (previously consumed data
     * is skipped on a fresh start without committed offsets).
     *
     * @return a {@link FlinkKafkaConsumer} emitting raw message values as {@link String}
     */
    public static FlinkKafkaConsumer<String> getKafkaConsumerSource() {
        // Kafka connector properties; broker list and consumer group come from the environment.
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", Env.getKafka().getHost());
        properties.setProperty("group.id", Env.getKafka().getGroupId());
        // Deserializer for the Kafka message key.
        // NOTE(review): Flink's consumer deserializes via the DeserializationSchema passed
        // below, so these two properties are likely ignored — kept for behavior parity; verify.
        properties.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        // Deserializer for the Kafka message value.
        properties.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        List<String> topics = Arrays.asList(ConfConstant.TOPIC_RISK_RESULT_INFO);
        FlinkKafkaConsumer<String> kafkaSource =
                new FlinkKafkaConsumer<>(topics, new SimpleStringSchema(), properties);

        // Ignore committed offsets on startup and begin from the latest records.
        kafkaSource.setStartFromLatest();
        return kafkaSource;
    }
}
