package com.dtwave.lnstreaming.source;


import com.dtwave.constant.Constants;
import com.dtwave.param.KafkaParamsObj;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.config.StartupMode;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

public class KafkaSource {

    private static final Logger LOGGER = LoggerFactory.getLogger(KafkaSource.class);

    /**
     * Builds a {@link FlinkKafkaConsumer} for the comma-separated OGG topic list
     * carried by {@code kafkaParamsObj}.
     *
     * <p>Startup position: if {@code getStartTimestamp()} differs from
     * {@code Constants.KAFKA_START_TIMESTAMP}, consumption starts from a timestamp
     * (currently a placeholder value — see TODO below); otherwise the configured
     * startup mode decides between "latest" and the committed group offsets.
     * Offsets are committed as part of Flink checkpoints.
     *
     * @param kafkaParamsObj Kafka connection and startup parameters
     *                       (broker url, group id, topics, startup mode/timestamp)
     * @return a configured string-deserializing Kafka consumer
     */
    public static FlinkKafkaConsumer<String> kafkaSource(KafkaParamsObj kafkaParamsObj) {

        LOGGER.info("获取kafka topic列表");
        // The OGG topic parameter is a comma-separated list; split it into topics.
        String oggTopic = kafkaParamsObj.getOggTopic();
        List<String> topics = new ArrayList<>();
        for (String topic : oggTopic.split(",")) {
            topics.add(topic);
            LOGGER.info("加入topic：{}", topic);
        }

        Properties properties = new Properties();
        // FIX: key was misspelled "boostrap.servers", so the broker address
        // was silently ignored by the Kafka client.
        properties.setProperty("bootstrap.servers", kafkaParamsObj.getKafkaUrl());
        properties.setProperty("group.id", kafkaParamsObj.getGroupId());

        // Create the Kafka consumer with a plain-string deserialization schema.
        FlinkKafkaConsumer<String> kafkaConsumer =
                new FlinkKafkaConsumer<>(topics, new SimpleStringSchema(), properties);

        // If a start timestamp was supplied, begin consuming from that point in time.
        if (!Constants.KAFKA_START_TIMESTAMP.equals(kafkaParamsObj.getStartTimestamp())) {
            // TODO: parse kafkaParamsObj.getStartTimestamp() into epoch millis;
            // the original template left this unimplemented with a placeholder.
            kafkaConsumer.setStartFromTimestamp(10000);
        } else {
            // Otherwise pick the offset strategy from the configured startup mode.
            String startupMode = kafkaParamsObj.getStartUpModule();
            if (StartupMode.LATEST.name().equals(startupMode)) {
                kafkaConsumer.setStartFromLatest();
            } else {
                kafkaConsumer.setStartFromGroupOffsets();
            }
        }
        // Commit Kafka offsets together with Flink checkpoints.
        kafkaConsumer.setCommitOffsetsOnCheckpoints(true);
        LOGGER.info("获取kafkaConsumer");
        return kafkaConsumer;
    }

}
