package Watermarks.TimestampsperKafkaPartition;

import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks;
import org.apache.flink.streaming.api.functions.AssignerWithPunctuatedWatermarks;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumerBase;
import org.apache.kafka.common.serialization.StringDeserializer;
import scala.math.Ordering;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Properties;

/**
 * Example Flink streaming job that consumes records from a Kafka topic as
 * {@code MyType} elements and prints them to stdout.
 *
 * <p>Broker/topic/group settings come from {@link GlobalKafkaConfig}. A
 * commented-out example below shows how a periodic watermark assigner with
 * bounded out-of-orderness would be attached directly to the Kafka consumer
 * (so watermarks are generated per Kafka partition).
 *
 * @author 王杰 (Wang Jie)
 * @since 2020/11/12
 */
public class KafkaWaterMarker {
    // Kafka connection settings, shared via the project-wide config holder.
    public static final String brokerList = GlobalKafkaConfig.brokerList;
    public static final String topic = GlobalKafkaConfig.topic;
    public static final String groupId = GlobalKafkaConfig.groupId;

    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Kafka client configuration. NOTE(review): the Flink connector deserializes
        // records via the DeserializationSchema below, so the key/value deserializer
        // entries are not actually used by Flink; kept for compatibility.
        Properties properties = new Properties();
        properties.put("key.deserializer",
                "org.apache.kafka.common.serialization.StringDeserializer");
        properties.put("value.deserializer",
                "org.apache.kafka.common.serialization.StringDeserializer");
        properties.put("bootstrap.servers", brokerList);
        properties.put("group.id", groupId);

        // Turns raw Kafka record bytes into MyType elements.
        DeserializationSchema<MyType> schema = new DeserializationSchema<MyType>() {
            @Override
            public MyType deserialize(byte[] message) throws IOException {
                // Fix: decode with an explicit charset instead of the platform
                // default, which varies between machines.
                return new MyType(new String(message, StandardCharsets.UTF_8));
            }

            @Override
            public boolean isEndOfStream(MyType nextElement) {
                // Unbounded stream: never signal end-of-stream.
                return false;
            }

            @Override
            public TypeInformation<MyType> getProducedType() {
                // Fix: returning null here causes a NullPointerException when
                // Flink builds the stream graph; the schema must declare the
                // type it produces.
                return TypeInformation.of(MyType.class);
            }
        };

        // Fix: use the configured topic constant instead of the hard-coded "wang",
        // which silently ignored GlobalKafkaConfig.topic.
        FlinkKafkaConsumer<MyType> consumer = new FlinkKafkaConsumer<>(topic, schema, properties);

        /* Example: attach event-time timestamps and a bounded-out-of-orderness
           periodic watermark generator directly to the Kafka consumer, so
           watermarks are tracked per Kafka partition.
        FlinkKafkaConsumerBase<MyType> kafkaConsumerBase = consumer.assignTimestampsAndWatermarks(new AssignerWithPeriodicWatermarks<MyType>() {
            private long currentMaxTimestamp;
            private final long maxOutOfOrderness = 3500; // max expected lateness, in ms

            @Override
            public long extractTimestamp(MyType element, long recordTimestamp) {
                long timestamp = element.getCreationTime();
                currentMaxTimestamp = Math.max(timestamp, currentMaxTimestamp);
                return timestamp;
            }

            @Override
            public Watermark getCurrentWatermark() {
                // Watermark trails the max seen timestamp by the allowed lateness.
                return new Watermark(currentMaxTimestamp - maxOutOfOrderness);
            }
        });*/

        DataStreamSource<MyType> source = env.addSource(consumer);
        source.print();
        env.execute();
    }
}
