package com.edata.bigdata.flink;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.SlidingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.kafka.clients.consumer.OffsetResetStrategy;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.time.Duration;
import java.util.Properties;
import java.io.Serializable;


/**
 * Helper for building Flink streaming jobs: creates Kafka-backed {@code DataStream}s
 * (JSON-deserialized or raw string), optionally attaches event-time watermarks driven
 * by configuration, and applies sliding event-time windows.
 *
 * <p>Serialization note: instances of this class may be captured in Flink operator
 * closures, hence {@code Serializable}. The execution environment and logger are not
 * serializable and must never travel with the closure, so they are {@code transient};
 * after deserialization they are {@code null} and only meaningful on the driver side.
 */
public class StreamSource implements Serializable {

    private static final long serialVersionUID = 1L;

    // transient: SLF4J Logger is not Serializable. Null after deserialization;
    // only used on the driver/client side.
    public transient Logger logger = LoggerFactory.getLogger(this.getClass());
    // transient: StreamExecutionEnvironment is not Serializable and is only needed
    // where the job graph is assembled.
    public transient StreamExecutionEnvironment env;
    // Timestamp pattern handed to EventTimeExtractor, e.g. yyyy-MM-dd HH:mm:ss:SSS.
    // May be null when the no-format constructor is used — presumably EventTimeExtractor
    // falls back to a default then; verify against that class.
    public String dateFormat;

    /**
     * Creates a {@code DataStream<T>} from Kafka, deserializing each record's value as
     * JSON into {@code clazz}.
     *
     * @param sourceProps must contain {@code kafka.bootstrap.servers}, {@code subscribe}
     *                    (topic list) and {@code group.id}; optionally
     *                    {@code watermarks.enabled}, {@code watermarks.maxOutOfOrder.sec}
     *                    and {@code watermarks.idleness.sec}
     * @param offsetProps extra Kafka consumer properties; may be {@code null}
     * @param clazz       target type for JSON deserialization
     * @return the resulting stream, with watermarks attached when enabled
     */
    public <T> DataStream<T> createDataStream(Properties sourceProps,
                                              Properties offsetProps,
                                              Class<T> clazz) {
        DeserializationSchema<T> jsonSchema = new JsonDeserializationSchema<>(clazz);
        KafkaSource<T> kafkaSource = buildKafkaSource(sourceProps, offsetProps, jsonSchema);
        return env.fromSource(kafkaSource, buildWatermarkStrategy(sourceProps, clazz), "Kafka Source");
    }

    /**
     * Creates a {@code DataStream<String>} from Kafka, delivering each record's value
     * as a raw UTF-8 string (no JSON parsing).
     *
     * @param sourceProps see {@link #createDataStream(Properties, Properties, Class)}
     * @param offsetProps extra Kafka consumer properties; may be {@code null}
     * @return the resulting stream, with watermarks attached when enabled
     */
    public DataStream<String> createDataStream(Properties sourceProps,
                                               Properties offsetProps) {
        KafkaSource<String> kafkaSource =
                buildKafkaSource(sourceProps, offsetProps, new SimpleStringSchema());
        return env.fromSource(kafkaSource, buildWatermarkStrategy(sourceProps, String.class), "Kafka Source");
    }

    /**
     * Builds a KafkaSource from the shared connection properties. Starts from committed
     * offsets, falling back to EARLIEST when no committed offset exists.
     */
    private <T> KafkaSource<T> buildKafkaSource(Properties sourceProps,
                                                Properties offsetProps,
                                                DeserializationSchema<T> valueSchema) {
        // A null offsetProps simply means "no extra consumer properties".
        Properties extraProps = (offsetProps != null) ? offsetProps : new Properties();
        return KafkaSource
                .<T>builder()
                .setBootstrapServers(sourceProps.getProperty("kafka.bootstrap.servers"))
                .setTopics(sourceProps.getProperty("subscribe"))
                .setGroupId(sourceProps.getProperty("group.id"))
                .setValueOnlyDeserializer(valueSchema)
                .setStartingOffsets(OffsetsInitializer.committedOffsets(OffsetResetStrategy.EARLIEST))
                .setProperties(extraProps)
                .build();
    }

    /**
     * Builds the watermark strategy described by {@code sourceProps}: no watermarks
     * unless {@code watermarks.enabled} is {@code "true"}, otherwise bounded
     * out-of-orderness with idleness detection, timestamps extracted by
     * {@code EventTimeExtractor} using {@link #dateFormat}.
     *
     * <p>When enabled, {@code watermarks.maxOutOfOrder.sec} and
     * {@code watermarks.idleness.sec} are required; a missing key throws
     * {@link NumberFormatException} (same behavior as the original inline code).
     */
    private <T> WatermarkStrategy<T> buildWatermarkStrategy(Properties sourceProps, Class<T> clazz) {
        if (!Boolean.parseBoolean(sourceProps.getProperty("watermarks.enabled"))) {
            return WatermarkStrategy.noWatermarks();
        }
        long maxOutOfOrderSec = Long.parseLong(sourceProps.getProperty("watermarks.maxOutOfOrder.sec"));
        long idlenessSec = Long.parseLong(sourceProps.getProperty("watermarks.idleness.sec"));
        return WatermarkStrategy
                .<T>forBoundedOutOfOrderness(Duration.ofSeconds(maxOutOfOrderSec))
                .withTimestampAssigner(new EventTimeExtractor<>(clazz, dateFormat))
                .withIdleness(Duration.ofSeconds(idlenessSec));
    }

    /**
     * Keys the stream by {@code EventKeyExtractor} and applies sliding event-time
     * windows with allowed lateness. Records with the same key share a window.
     *
     * @param dataStream  stream to window (must already carry event-time watermarks)
     * @param windowSize  window length in seconds
     * @param slidingSize slide interval in seconds
     * @param lateness    allowed lateness in seconds
     * @param clazz       element type, forwarded to {@code EventKeyExtractor}
     */
    public <T> WindowedStream<T, String, TimeWindow> applySlidingWindows(
            DataStream<T> dataStream,
            long windowSize,
            long slidingSize,
            long lateness,
            Class<T> clazz) {
        return dataStream.keyBy(new EventKeyExtractor<>(clazz))
                .window(SlidingEventTimeWindows.of(Duration.ofSeconds(windowSize), Duration.ofSeconds(slidingSize)))
                .allowedLateness(Duration.ofSeconds(lateness));
    }

    /**
     * Submits the assembled job for execution; no-op when no environment is set.
     *
     * @param jobName name shown in the Flink UI
     * @throws Exception propagated from {@code StreamExecutionEnvironment#execute}
     */
    public void start(String jobName) throws Exception {
        if (env != null) {
            env.execute(jobName);
        }
    }

    /**
     * @param env        execution environment used to build and run the job
     * @param dateFormat timestamp pattern for event-time extraction,
     *                   e.g. {@code yyyy-MM-dd HH:mm:ss:SSS}
     */
    public StreamSource(StreamExecutionEnvironment env, String dateFormat) {
        this.env = env;
        this.dateFormat = dateFormat;
    }

    /** Convenience constructor without an event-time format ({@link #dateFormat} stays null). */
    public StreamSource(StreamExecutionEnvironment env) {
        this(env, null);
    }
}
