package cn.com.bluemoon.bd.flink.creater.source;

import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

import java.io.InputStream;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.util.Properties;


/**
 * Factory for Kafka-backed Flink sources. Configuration comes either from a
 * {@code kafka.properties} file (via {@link ParameterTool}) or from a pre-built
 * {@link Properties} object. Startup mode and parallelism can be overridden
 * through main-function arguments.
 */
public class KafkaSourceCreater {

    /** Pattern of startup timestamps supplied by configuration, e.g. "20240101000000". */
    private static final String TIMESTAMP_PATTERN = "yyyyMMddHHmmss";

    /** Cached formatter — DateTimeFormatter is immutable and thread-safe, so build it once. */
    private static final DateTimeFormatter TIMESTAMP_FORMATTER = DateTimeFormatter.ofPattern(TIMESTAMP_PATTERN);

    /** Kafka consumer properties handed to every consumer this factory creates. */
    private Properties props;

    /** Properties-file configuration; null when the {@link Properties} constructor was used. */
    private ParameterTool parameterTool;

    /**
     * Builds the factory from a {@code kafka.properties} stream. The file must define
     * {@code bootstrap.servers} and {@code group.id}; partition discovery is enabled
     * with a 30-second interval.
     *
     * @param is input stream of the kafka.properties file (the caller owns and closes it)
     * @throws RuntimeException if the stream cannot be read or a required key is missing
     */
    public KafkaSourceCreater(InputStream is) {
        try {
            parameterTool = ParameterTool.fromPropertiesFile(is);
        } catch (Exception e) {
            throw new RuntimeException("初始化kafka失败,缺少属性文件：kafka.properties", e);
        }

        String bootstrapServers = parameterTool.get("bootstrap.servers");
        String groupId = parameterTool.get("group.id");
        // Validate explicitly instead of catching the NullPointerException that
        // Properties.setProperty would throw for a null value.
        if (bootstrapServers == null || groupId == null) {
            throw new RuntimeException("初始化kafka失败，kafka.properties文件缺少bootstrap.servers或者group.id属性");
        }

        props = new Properties();
        props.setProperty("bootstrap.servers", bootstrapServers);
        props.setProperty("group.id", groupId);
        props.setProperty("flink.partition-discovery.interval-millis", "30000");
    }

    /**
     * Builds the factory from pre-assembled Kafka consumer properties.
     *
     * @param props consumer properties; should contain at least bootstrap.servers and group.id
     */
    public KafkaSourceCreater(Properties props) {
        this.props = props;
    }

    /**
     * Creates a Kafka source whose parallelism is resolved from the main-function
     * arguments: {@code --kafka_partition_<topic>} takes precedence over the shared
     * {@code --kafka_partition} (default 3).
     *
     * @param env   stream execution environment
     * @param topic Kafka topic to consume
     * @param args  main-function arguments
     * @return a source of raw message strings with the resolved parallelism applied
     */
    public DataStreamSource<String> createSource(StreamExecutionEnvironment env, String topic, String[] args) {
        FlinkKafkaConsumer<String> consumer = createConsumer(topic, args);
        ParameterTool argsTool = ParameterTool.fromArgs(args);
        int commonParallelism = argsTool.getInt("kafka_partition", 3);
        int parallelism = argsTool.getInt("kafka_partition_" + topic, commonParallelism);
        return env.addSource(consumer).setParallelism(parallelism);
    }

    /**
     * Creates a Kafka source with an explicitly supplied parallelism.
     *
     * @param env          stream execution environment
     * @param topic        Kafka topic to consume
     * @param partitionNum parallelism to apply to the source operator
     * @param args         main-function arguments (used for startup-mode resolution)
     * @return a source of raw message strings with the given parallelism applied
     */
    public DataStreamSource<String> createSource(StreamExecutionEnvironment env, String topic, int partitionNum, String[] args) {
        FlinkKafkaConsumer<String> consumer = createConsumer(topic, args);
        return env.addSource(consumer).setParallelism(partitionNum);
    }

    /**
     * Creates a {@link FlinkKafkaConsumer} for the topic and applies the configured
     * startup mode. Resolution order: the {@code --kafka.startup.mode} argument, then
     * the {@code startup.mode} key from kafka.properties. Supported values (case-insensitive):
     * EARLIEST, LATEST, GROUP_OFFSETS, TIMESTAMP. For TIMESTAMP the start time comes from
     * {@code --kafka.start.from.timestamp} (or {@code start.from.timestamp} in the
     * properties file), formatted as yyyyMMddHHmmss, defaulting to today's midnight.
     * When no mode is configured at all, the consumer starts from the earliest offset.
     *
     * @param topic Kafka topic to consume
     * @param args  main-function arguments
     * @return a configured consumer for the topic
     */
    public FlinkKafkaConsumer<String> createConsumer(String topic, String[] args) {
        // Diamond operator: the original raw-type construction caused an unchecked warning.
        FlinkKafkaConsumer<String> consumer = new FlinkKafkaConsumer<>(topic, new SimpleStringSchema(), props);
        ParameterTool argsTool = ParameterTool.fromArgs(args);
        String startupMode = argsTool.get("kafka.startup.mode");

        // Command-line argument wins; fall back to the properties file when present.
        if (StringUtils.isEmpty(startupMode) && parameterTool != null) {
            startupMode = parameterTool.get("startup.mode");
        }

        if (StringUtils.isNotEmpty(startupMode)) {
            switch (startupMode.toUpperCase()) {
                case "EARLIEST":
                    consumer.setStartFromEarliest();
                    break;
                case "LATEST":
                    consumer.setStartFromLatest();
                    break;
                case "GROUP_OFFSETS":
                    consumer.setStartFromGroupOffsets();
                    break;
                case "TIMESTAMP":
                    String startTimestamp = argsTool.get("kafka.start.from.timestamp");

                    if (StringUtils.isEmpty(startTimestamp)) {
                        // Default to midnight of the current day when no timestamp is configured.
                        String todayStart = LocalDateTime.of(LocalDate.now(), LocalTime.MIN).format(TIMESTAMP_FORMATTER);

                        if (parameterTool != null) {
                            startTimestamp = parameterTool.get("start.from.timestamp", todayStart);
                        } else {
                            startTimestamp = todayStart;
                        }
                    }

                    consumer.setStartFromTimestamp(timestampToMs(startTimestamp));
                    break;
                default:
                    // Unknown mode: leave the connector's default behavior untouched.
                    break;
            }
        } else {
            consumer.setStartFromEarliest();
        }

        return consumer;
    }

    /**
     * Converts a yyyyMMddHHmmss timestamp string to epoch milliseconds, interpreted
     * in UTC+8 (China Standard Time, matching the original hard-coded offset).
     *
     * @param timestamp timestamp string in {@link #TIMESTAMP_PATTERN} format
     * @return epoch milliseconds of the timestamp at UTC+8
     */
    private static long timestampToMs(String timestamp) {
        return LocalDateTime.parse(timestamp, TIMESTAMP_FORMATTER)
                .toInstant(ZoneOffset.of("+8"))
                .toEpochMilli();
    }

}
