package ru.unlocker.reports.kafka;

import static org.apache.flink.api.common.RuntimeExecutionMode.STREAMING;
import static org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.getExecutionEnvironment;

import java.sql.Date;
import java.sql.Timestamp;
import java.time.Duration;
import java.util.List;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.connector.jdbc.JdbcConnectionOptions.JdbcConnectionOptionsBuilder;
import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import ru.unlocker.reports.domain.CalculateReportStatusJob;
import ru.unlocker.reports.domain.ReportAndTimerEvent;
import ru.unlocker.reports.domain.ReportStatus;

/**
 * Flink streaming job that reads {@link ReportAndTimerEvent}s from Kafka, derives
 * {@link ReportStatus} records (the computation itself lives in the
 * {@link CalculateReportStatusJob} base class) and persists them into the
 * {@code report_status} PostgreSQL table through the JDBC sink.
 */
public class KafkaCalculateReportStatusJob extends CalculateReportStatusJob {

  /** Maximum event-time out-of-orderness tolerated by the watermark strategy. */
  private static final Duration MAX_OUT_OF_ORDERNESS = Duration.ofSeconds(5);

  // JDBC sink batching: flush after 1000 rows or 200 ms, retry a failed batch up to 5 times.
  private static final int JDBC_BATCH_SIZE = 1000;
  private static final long JDBC_BATCH_INTERVAL_MS = 200;
  private static final int JDBC_MAX_RETRIES = 5;

  /**
   * Builds the input stream from the Kafka timers topic with event-time
   * watermarks taken from each event's own timestamp.
   *
   * @param params job parameters (unused here; Kafka settings come from {@link KafkaConfiguration})
   * @param env the execution environment to attach the source to
   * @return stream of report/timer events with bounded-out-of-orderness watermarks
   */
  @Override
  protected DataStream<ReportAndTimerEvent> readStatusStream(ParameterTool params, StreamExecutionEnvironment env) {
    KafkaSource<ReportAndTimerEvent> timerSource = buildKafkaSource();
    return env.fromSource(
        timerSource,
        WatermarkStrategy
            .<ReportAndTimerEvent>forBoundedOutOfOrderness(MAX_OUT_OF_ORDERNESS)
            .withTimestampAssigner((event, timestamp) -> event.getTimestamp()),
        "Kafka Timer Source"
    );
  }

  /**
   * Writes the computed statuses into PostgreSQL via a batching JDBC sink.
   *
   * <p>Connection settings are read from the job parameters
   * ({@code --jdbc.url}, {@code --jdbc.username}, {@code --jdbc.password});
   * the defaults preserve the previously hard-coded values so existing
   * deployments keep working unchanged.
   *
   * @param params job parameters carrying optional JDBC overrides
   * @param outStream the status stream to persist
   */
  @Override
  protected void writeStatusStream(ParameterTool params, DataStream<ReportStatus> outStream) {
    // NOTE(review): prefer supplying --jdbc.password externally (or via a secret
    // store) rather than relying on the in-source default credential.
    String jdbcUrl = params.get("jdbc.url", "jdbc:postgresql://postgres:5432/customs");
    String jdbcUsername = params.get("jdbc.username", "customs");
    String jdbcPassword = params.get("jdbc.password", "customs");

    SinkFunction<ReportStatus> sink = JdbcSink.sink(
        "insert into report_status (event_time, status, report_code, workday) values (?, ?, ?, ?)",
        (preparedStatement, reportStatus) -> {
          preparedStatement.setTimestamp(1, Timestamp.from(reportStatus.timestamp()));
          preparedStatement.setString(2, reportStatus.state().name());
          preparedStatement.setString(3, reportStatus.reportCode());
          // java.sql.Date is required here by PreparedStatement#setDate.
          preparedStatement.setDate(4, Date.valueOf(reportStatus.workday()));
        },
        JdbcExecutionOptions.builder()
            .withBatchSize(JDBC_BATCH_SIZE)
            .withBatchIntervalMs(JDBC_BATCH_INTERVAL_MS)
            .withMaxRetries(JDBC_MAX_RETRIES)
            .build(),
        new JdbcConnectionOptionsBuilder()
            .withUrl(jdbcUrl)
            .withDriverName("org.postgresql.Driver")
            .withUsername(jdbcUsername)
            .withPassword(jdbcPassword)
            .build()
    );
    outStream.addSink(sink);
  }

  /**
   * Creates the Kafka source for the timers topic, starting from the earliest
   * available offsets and deserializing with {@link ReportAndTimerEventSchema}.
   *
   * @return configured Kafka source of report/timer events
   */
  public static KafkaSource<ReportAndTimerEvent> buildKafkaSource() {
    KafkaConfiguration configuration = new KafkaConfiguration();
    String groupId = "reports-and-timers";
    // Resolve the topic name once; it is needed for both subscription and the schema.
    String timersTopic = configuration.getTimersTopicName();
    return KafkaSource
        .<ReportAndTimerEvent>builder()
        .setBootstrapServers(configuration.getBootstrapServers())
        .setGroupId(groupId)
        .setProperties(configuration.getConsumerProperties(groupId))
        .setTopics(List.of(timersTopic))
        .setDeserializer(new ReportAndTimerEventSchema(timersTopic))
        .setStartingOffsets(OffsetsInitializer.earliest())
        .build();
  }

  /**
   * Entry point: assembles the pipeline in streaming mode and submits it.
   *
   * @param args command-line arguments forwarded as job parameters
   * @throws Exception if the Flink job submission or execution fails
   */
  public static void main(String[] args) throws Exception {
    final StreamExecutionEnvironment env = getExecutionEnvironment().setRuntimeMode(STREAMING);
    ParameterTool params = ParameterTool.fromArgs(args);
    new KafkaCalculateReportStatusJob().createApplicationPipeline(params, env);
    env.execute("Calculate Report Status Job");
  }
}
