package ru.unlocker.reports.kafka;

import static org.apache.flink.api.common.RuntimeExecutionMode.STREAMING;
import static org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.getExecutionEnvironment;

import java.time.Duration;
import java.util.List;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.connector.base.DeliveryGuarantee;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import ru.unlocker.reports.domain.FoldingReportStreamJob;
import ru.unlocker.reports.domain.RawReportEvent;
import ru.unlocker.reports.domain.ReportAndTimerEvent;

/**
 * Kafka-backed {@link FoldingReportStreamJob}: consumes {@link RawReportEvent}s from the
 * reports topic and publishes {@link ReportAndTimerEvent}s produced by the folding pipeline.
 *
 * <p>Connection details (bootstrap servers, topic names, consumer/producer properties) are
 * taken from {@link KafkaConfiguration}; the {@link ParameterTool} arguments are currently
 * not consulted by the source/sink builders.
 */
public class KafkaFoldingReportStreamJob extends FoldingReportStreamJob {

  /** Consumer group id for the report source; also used to derive consumer properties. */
  private static final String CONSUMER_GROUP_ID = "raw-reports";

  /** Client id handed to {@link KafkaConfiguration#getProducerProperties(String)}. */
  private static final String PRODUCER_CLIENT_ID = "flink-reports-producer";

  /** Maximum event-time out-of-orderness tolerated by the watermark strategy. */
  private static final Duration MAX_OUT_OF_ORDERNESS = Duration.ofSeconds(5);

  /**
   * Creates the source stream of raw report events with event-time semantics.
   *
   * @param params job parameters (currently unused; settings come from {@link KafkaConfiguration})
   * @param env    the Flink execution environment to attach the source to
   * @return a stream of raw report events with timestamps and bounded-out-of-orderness watermarks
   */
  @Override
  protected DataStream<RawReportEvent> readReportStream(ParameterTool params, StreamExecutionEnvironment env) {
    KafkaSource<RawReportEvent> reportSource = buildKafkaSource();
    return env.fromSource(
        reportSource,
        WatermarkStrategy
            .<RawReportEvent>forBoundedOutOfOrderness(MAX_OUT_OF_ORDERNESS)
            // Event time comes from the event payload, not the Kafka record timestamp.
            .withTimestampAssigner((event, timestamp) -> event.getTimestamp()),
        "Kafka Report Source"
    );
  }

  /**
   * Attaches the Kafka sink to the folded output stream.
   *
   * @param params    job parameters (currently unused; settings come from {@link KafkaConfiguration})
   * @param outStream the stream of report/timer events to publish
   */
  @Override
  protected void writeReportStream(ParameterTool params, DataStream<ReportAndTimerEvent> outStream) {
    outStream.sinkTo(buildKafkaSink());
  }

  /**
   * Builds the Kafka source for raw report events.
   *
   * <p>Starts from the earliest available offset, so a consumer group without committed
   * offsets replays the full topic history.
   *
   * @return a configured {@link KafkaSource} for the reports topic
   */
  public static KafkaSource<RawReportEvent> buildKafkaSource() {
    KafkaConfiguration configuration = new KafkaConfiguration();
    return KafkaSource
        .<RawReportEvent>builder()
        .setBootstrapServers(configuration.getBootstrapServers())
        .setGroupId(CONSUMER_GROUP_ID)
        .setProperties(configuration.getConsumerProperties(CONSUMER_GROUP_ID))
        .setTopics(List.of(configuration.getReportsTopicName()))
        .setDeserializer(new RawReportEventSchema(configuration.getReportsTopicName()))
        .setStartingOffsets(OffsetsInitializer.earliest())
        .build();
  }

  /**
   * Builds the Kafka sink for report/timer events.
   *
   * <p>NOTE(review): {@code DeliveryGuarantee.NONE} means records may be lost or duplicated
   * on failure/restart — confirm this is acceptable for the timers topic.
   *
   * @return a configured {@link KafkaSink} for the timers topic
   */
  public static KafkaSink<ReportAndTimerEvent> buildKafkaSink() {
    KafkaConfiguration configuration = new KafkaConfiguration();
    return KafkaSink
        .<ReportAndTimerEvent>builder()
        .setBootstrapServers(configuration.getBootstrapServers())
        .setDeliveryGuarantee(DeliveryGuarantee.NONE)
        .setRecordSerializer(new ReportAndTimerEventSchema(configuration.getTimersTopicName()))
        .setKafkaProducerConfig(configuration.getProducerProperties(PRODUCER_CLIENT_ID))
        .build();
  }

  /**
   * Entry point: wires the application pipeline and submits the job in streaming mode.
   *
   * @param args command-line arguments, parsed into a {@link ParameterTool}
   * @throws Exception if pipeline construction or job execution fails
   */
  public static void main(String[] args) throws Exception {
    final StreamExecutionEnvironment env = getExecutionEnvironment().setRuntimeMode(STREAMING);
    ParameterTool params = ParameterTool.fromArgs(args);
    new KafkaFoldingReportStreamJob().createApplicationPipeline(params, env);
    env.execute("Folding Report Stream Job");
  }
}
