package com.example.kafkastreamworkcount.config;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Produced;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.annotation.EnableKafkaStreams;
import org.springframework.kafka.config.KafkaStreamsConfiguration;

import java.util.Properties;

import static java.util.Locale.filter;

@EnableKafkaStreams
@EnableKafka
@Configuration
public class KafkaConfig {

    @Value("${spring.kafka.bootstrap-servers}")
    private String bootstrapServers;

    @Value("${spring.kafka.streams.application-id}")
    private String applicationId;


//    @Bean
//    public KafkaStreams kafkaStreams(KafkaStreamsConfiguration streamsConfig) {
//        Properties props = new Properties();
//        props.put("bootstrap.servers", "localhost:9092");
//        props.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationId);
//        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
//        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
//        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
//        props.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, 1);
//
//        StreamsBuilder builder = new StreamsBuilder();
//        KStream<String, String> kStream = builder.stream("retry-events", Consumed.with(Serdes.String(), Serdes.String()));
//
//        // Define a custom ObjectMapper for JSON parsing
//        ObjectMapper objectMapper = new ObjectMapper();
//
//        // Use mapValues to parse JSON and extract the timestamp
//        kStream = kStream.mapValues(value -> {
//            try {
//                JsonNode jsonNode = objectMapper.readTree(value);
//                long timestamp = jsonNode.get("timestamp").asLong();
//                long tenMinutesAgo = System.currentTimeMillis() / 60000; // Current time in minutes
//                if (timestamp <= tenMinutesAgo) {
//                    return value; // Keep messages that match the criteria
//                } else {
//                    return null; // Filter out messages that don't match the criteria
//                }
//            } catch (Exception e) {
//                e.printStackTrace();
//                return null;
//            }
//        }).filter((key, value) -> value != null); // Remove messages that failed parsing
//
//        kStream.to("output-topic", Produced.with(Serdes.String(), Serdes.String()));
//
//        KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), props);
//        kafkaStreams.start();
//        return kafkaStreams;
//    }
}
