package kuoge.practice.kafka.stream.member.activity.conversionrate;

import com.alibaba.fastjson.JSON;
import io.netty.util.HashedWheelTimer;
import io.netty.util.TimerTask;
import kuoge.practice.kafka.stream.member.KafkaConfigFactory;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.*;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.Stores;

import java.time.Duration;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.List;
import java.util.concurrent.TimeUnit;

import static kuoge.practice.kafka.stream.member.TopicConstants.*;
import static org.apache.kafka.streams.KafkaStreams.State.NOT_RUNNING;

/**
 * Consumes item visit and purchase events, computes per-item conversion rates over
 * 10-second windows with Kafka Streams, and periodically reports the results read
 * back from the conversion-rate topic.
 */
public class EventConsumer {
    // Consumer used by the reporting loop. Declared <String, Long> to match the
    // original; see NOTE(review) in startConversionRateReport() about the actual
    // wire format produced by the topology.
    private static final KafkaConsumer<String, Long> kafkaConsumer;
    // Netty wheel timer; its tasks all run on ONE worker thread, so a task must
    // never block forever (see startConversionRateReport()).
    private final HashedWheelTimer hashedWheelTimer = new HashedWheelTimer();
    private volatile KafkaStreams kafkaStreams;
    // Cooperative shutdown flag for the reporting poll loop.
    private volatile boolean reporting;
    // DateTimeFormatter is immutable and thread-safe; one shared instance is fine.
    private final DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    static {
        kafkaConsumer = new KafkaConsumer<>(KafkaConfigFactory.getConsumerProperties());
    }

    /**
     * Builds and starts the conversion-rate topology: visit and purchase events are
     * counted per item over a 10-second window, left-joined on the item key, and the
     * resulting {@code ItemConversionRateEvent} is published as JSON to
     * {@code ITEM_CONVERSION_RATE_TOPIC}.
     */
    public void startItemConversionRateProcess() {
        // Same tumbling window on both sides so the windowed join keys line up.
        final TimeWindows timeWindow = TimeWindows.of(Duration.ofSeconds(10));
        final StreamsBuilder streamsBuilder = new StreamsBuilder();

        // key = item ID, value = item detail-page visit event
        // TODO: de-duplicate user IDs within the time window
        final KStream<String, String> itemVisitKStream =
                streamsBuilder.stream(ITEM_VISIT_TOPIC, Consumed.with(Serdes.String(), Serdes.String()));
        final KTable<Windowed<String>, Long> itemVisitKTable = itemVisitKStream
                // %n added: the original format string had no newline, so log lines ran together
                .peek((key, value) -> System.out.printf("in the itemVisitKTable transformation,  second = %s, key:%s, value:%s%n", LocalDateTime.now().format(dateTimeFormatter), key, value))
                .groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
                .windowedBy(timeWindow)
                .count(Materialized.as("item_visit_counts_" + timeWindow.size()));

        // key = item ID, value = item purchase event
        final KStream<String, String> itemPurchaseKStream =
                streamsBuilder.stream(ITEM_PURCHASE_TOPIC, Consumed.with(Serdes.String(), Serdes.String()));
        final KTable<Windowed<String>, Long> itemPurchaseKTable = itemPurchaseKStream
                .peek((key, value) -> System.out.printf("in the itemPurchaseKTable peek transformation,  second = %s, key:%s, value:%s%n", LocalDateTime.now().format(dateTimeFormatter), key, value))
                .groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
                .windowedBy(timeWindow)
                .count(Materialized.as("item_purchase_counts_" + timeWindow.size()));

        itemVisitKTable
                // Table-table join, keyed on the windowed item key. In a LEFT join the
                // right-side value is null when no purchase happened in the window; the
                // original lambda auto-unboxed that null and threw NPE.
                .leftJoin(itemPurchaseKTable, (visit, purchase) -> {
                    final long purchases = purchase == null ? 0L : purchase;
                    // visit comes from a count() so it is >= 1, but guard the division anyway
                    final double rate = (visit == null || visit == 0L) ? 0.0 : ((double) purchases / visit) * 100;
                    return new ItemConversionRateEvent(rate, visit, purchases, System.currentTimeMillis());
                })
                .toStream()
                .peek((key, value) -> System.out.printf("in the itemConversionRateEventKTable peek transformation, second = %s, key:%s, value:%s%n", LocalDateTime.now().format(dateTimeFormatter), key.key(), value))
                // strip the window wrapper: downstream key is the bare item ID
                .map((k, v) -> new KeyValue<>(k.key(), JSON.toJSONString(v)))
                .to(ITEM_CONVERSION_RATE_TOPIC, Produced.with(Serdes.String(), Serdes.String()));

        try {
            kafkaStreams = new KafkaStreams(streamsBuilder.build(), KafkaConfigFactory.getStreamProperties());
            kafkaStreams.start();
        } catch (Exception e) {
            // If the KafkaStreams constructor itself threw, the field is still null —
            // the original unconditionally called close() here and would NPE.
            if (kafkaStreams != null) {
                kafkaStreams.close();
            }
            e.printStackTrace();
        }
    }

    /**
     * Schedules the reporting loop on the wheel timer (~1s from now). The loop polls
     * {@code ITEM_CONVERSION_RATE_TOPIC} until {@link #stopConversionRateReport()}
     * clears the flag, then closes the consumer on the same thread.
     */
    public void startConversionRateReport() {
        kafkaConsumer.subscribe(List.of(ITEM_CONVERSION_RATE_TOPIC));
        reporting = true;
        TimerTask task = timeout -> {
            // Bounded by the volatile flag instead of while(true): the original infinite
            // loop permanently occupied the HashedWheelTimer's single worker thread, so
            // the close task scheduled by stopConversionRateReport() could never run.
            while (reporting) {
                final ConsumerRecords<String, Long> records = kafkaConsumer.poll(Duration.ofMillis(100));
                for (ConsumerRecord<String, Long> record : records) {
                    // NOTE(review): this assumes the key is "epochMillis#itemId" and the value
                    // is a Long, but the topology above publishes the bare item ID as the key
                    // and a JSON string as the value — verify KafkaConfigFactory's
                    // deserializers and the key format against the producing stream.
                    final String[] keyParts = record.key().split("#");
                    System.out.printf("consume item_conversion_rate_topic message, second window = %d, key = %s, value = %d%n", Long.parseLong(keyParts[0]) / 1000, keyParts[1], record.value());
                }
            }
            // Close on the polling thread: KafkaConsumer is not safe for
            // multi-threaded access, so no other thread may touch it.
            kafkaConsumer.close();
        };
        hashedWheelTimer.newTimeout(task, 1000, TimeUnit.MILLISECONDS);
    }

    /**
     * Signals the reporting loop to exit; the loop closes the consumer itself.
     * The original scheduled close() as a second timer task, but the single timer
     * worker was stuck in the poll loop, so that task never executed — and closing
     * from another thread would break KafkaConsumer's threading contract anyway.
     */
    public void stopConversionRateReport() {
        reporting = false;
    }

    /** Stops the Streams topology if it was started and is not already stopped. */
    public void stopConversionRateProcess() {
        // null-guard: startItemConversionRateProcess() may never have been called,
        // in which case the original dereferenced a null field here
        if (kafkaStreams != null && NOT_RUNNING != kafkaStreams.state()) {
            kafkaStreams.close();
            System.out.println("stop kafka stream success...");
        }
    }
}
