package com.Practice;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KTable;
import java.time.Instant;
import java.util.Properties;

/**
 * Kafka Streams topology that counts logins per user within a fixed analysis
 * window (first half of 2023).
 *
 * <p>Input topic {@code test_login3} carries CSV-formatted String values where
 * field[1] is the user id and field[2] is the login timestamp in epoch millis
 * (assumed from the parsing below — confirm against the producer). The running
 * per-user counts are printed to stdout as the KTable updates.
 */
public class UserLoginAnalysis {

    // Inclusive analysis window: 2023-01-01T00:00:00Z .. 2023-06-30T23:59:59Z.
    // Parsed once instead of on every record.
    private static final Instant WINDOW_START = Instant.parse("2023-01-01T00:00:00Z");
    private static final Instant WINDOW_END = Instant.parse("2023-06-30T23:59:59Z");

    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "user-login-analysis");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "niit01:9092");
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

        StreamsBuilder builder = new StreamsBuilder();
        KStream<String, String> kStream = builder.stream("test_login3");

        // Keep only logins inside the window, re-key by user id, and count.
        KTable<String, Long> loginCounts = kStream
                .filter((key, value) -> isInWindow(value))
                .mapValues(value -> value.split(",")[1])
                .groupBy((key, value) -> value)
                .count();

        loginCounts.toStream().foreach((userId, count) ->
                System.out.println("User: " + userId + ", Login Count: " + count));

        KafkaStreams streams = new KafkaStreams(builder.build(), props);
        // Ensure state stores are flushed and offsets committed on JVM exit;
        // without this the application never calls close().
        Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
        streams.start();
    }

    /**
     * Returns {@code true} when the record's login timestamp (CSV field 2,
     * epoch millis) falls inside the inclusive analysis window.
     *
     * <p>Malformed records — fewer than three fields or a non-numeric
     * timestamp — return {@code false} so a single bad message cannot throw
     * inside the filter and kill the stream thread.
     *
     * @param value raw CSV record value from the input topic
     * @return whether the record should be counted
     */
    private static boolean isInWindow(String value) {
        if (value == null) {
            return false;
        }
        String[] fields = value.split(",");
        if (fields.length < 3) {
            return false; // drop record with missing timestamp field
        }
        long timestampMs;
        try {
            timestampMs = Long.parseLong(fields[2].trim());
        } catch (NumberFormatException e) {
            return false; // drop record with unparseable timestamp
        }
        Instant instant = Instant.ofEpochMilli(timestampMs);
        // Inclusive on both ends: the original isAfter/isBefore pair silently
        // excluded logins exactly at the window boundaries.
        return !instant.isBefore(WINDOW_START) && !instant.isAfter(WINDOW_END);
    }
}
