package kuoge.practice.kafka.stream.member.rfm;

import io.netty.util.HashedWheelTimer;
import io.netty.util.TimerTask;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.*;
import org.apache.kafka.streams.kstream.*;
import org.apache.kafka.streams.processor.ProcessorContext;
import org.apache.kafka.streams.processor.StateStore;

import java.time.Duration;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.TimeUnit;

import static org.apache.kafka.streams.KafkaStreams.State.NOT_RUNNING;

/**
 * Consumes member RFM (Recency/Frequency/Monetary) data from Kafka.
 *
 * <p>Two independent consumption paths are provided:
 * <ul>
 *   <li>{@link #startConsume()} / {@link #stopConsume()} — a plain {@link KafkaConsumer}
 *       poll loop on {@code member_rfm_topic}, started after a short delay on the
 *       {@link HashedWheelTimer};</li>
 *   <li>{@link #startStreamConsume()} / {@link #stopStreamConsume()} — a Kafka Streams
 *       topology that reads {@code order_topic}, rewrites values with two transformers,
 *       aggregates per key and publishes the result to {@code member_rfm_topic}.</li>
 * </ul>
 *
 * <p>NOTE(review): broker address and group id are hard-coded for local testing.
 */
public class MemberRFMConsumer {
    private static final String ORDER_TOPIC = "order_topic";
    private static final String MEMBER_RFM_TOPIC = "member_rfm_topic";
    private static final Properties props = new Properties();
    private static final KafkaConsumer<String, String> kafkaConsumer;
    private final HashedWheelTimer hashedWheelTimer = new HashedWheelTimer();

    // Cooperative stop flag for the poll loop. KafkaConsumer is not thread-safe,
    // so the polling thread must own poll() AND close() exclusively; other threads
    // only flip this flag.
    private volatile boolean consuming;

    private volatile KafkaStreams kafkaStreams;

    static {
        props.put("bootstrap.servers", "localhost:9092");
        // Consumer-side settings only; producer-only options (e.g. linger.ms) are omitted.
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.setProperty("group.id", "order_topic_test");
        kafkaConsumer = new KafkaConsumer<>(props);
    }

    /**
     * Subscribes to {@code member_rfm_topic} and, after a one-second delay on the
     * wheel timer, starts a poll loop that prints every record. The loop runs until
     * {@link #stopConsume()} clears the {@code consuming} flag, then closes the
     * consumer on its own thread (closing from another thread while poll() is in
     * flight would violate KafkaConsumer's single-thread contract).
     */
    public void startConsume() {
        kafkaConsumer.subscribe(List.of(MEMBER_RFM_TOPIC));
        consuming = true;
        TimerTask task = timeout -> {
            try {
                // Short poll timeout so a stop request is observed within ~100 ms.
                while (consuming) {
                    final ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofMillis(100));
                    for (ConsumerRecord<String, String> record : records) {
                        System.out.printf("consume member_rfm_topic message, offset = %d, key = %s, value = %s%n",
                                record.offset(), record.key(), record.value());
                    }
                }
            } finally {
                // Close on the polling thread itself — the only thread allowed to
                // touch the consumer.
                kafkaConsumer.close();
            }
        };
        hashedWheelTimer.newTimeout(task, 1000, TimeUnit.MILLISECONDS);
    }

    /**
     * Requests the poll loop to stop; the loop exits on its next flag check and
     * closes the consumer itself. (The previous implementation scheduled close()
     * on the same single-worker wheel timer whose thread was blocked by the
     * infinite poll loop, so the close task could never run.)
     */
    public void stopConsume() {
        consuming = false;
    }

    /**
     * Builds and starts the Kafka Streams topology:
     * {@code order_topic} → (MyTransformer_1 merged with MyTransformer_2)
     * → groupByKey → string-append aggregate (state store "aggregated-store")
     * → {@code member_rfm_topic}.
     */
    public void startStreamConsume() {
        // Named distinctly so it does not shadow the static consumer 'props'.
        final Properties streamsProps = new Properties();
        streamsProps.put(StreamsConfig.APPLICATION_ID_CONFIG, "streams-member-rfm");
        streamsProps.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        streamsProps.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        streamsProps.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

        final StreamsBuilder streamsBuilder = new StreamsBuilder();
        final KStream<String, String> sourceKStream = streamsBuilder.stream(ORDER_TOPIC);
        // Transformers are generically typed, so no raw TransformerSupplier casts are needed.
        final KStream<String, String> rKStream1 = sourceKStream.transform(MyTransformer_1::new);
        final KStream<String, String> rKStream2 = sourceKStream.transform(MyTransformer_2::new);
        final KStream<String, String> mergedStream = rKStream1.merge(rKStream2);
        final KGroupedStream<String, String> groupedStream = mergedStream.groupByKey();
        // Per-key aggregation: append each incoming value to the running string,
        // materialized in the "aggregated-store" state store.
        final KTable<String, String> aggregatedTable = groupedStream.aggregate(
                () -> "",  // initializer: empty aggregate
                (key, value, aggregate) -> {
                    final String newValue = aggregate + "," + value;
                    System.out.println("the new value:" + newValue);
                    return newValue;
                },
                Materialized.as("aggregated-store"));

        // Forward the aggregate changelog to the output topic.
        aggregatedTable.toStream().to(MEMBER_RFM_TOPIC);

        final Topology topology = streamsBuilder.build();
        try {
            kafkaStreams = new KafkaStreams(topology, streamsProps);
            kafkaStreams.start();
            System.out.println("start kafka stream success...");
        } catch (Exception e) {
            // Report instead of silently swallowing, and guard against the field
            // still being null when the KafkaStreams constructor itself threw.
            System.err.println("start kafka stream failed: " + e);
            if (kafkaStreams != null) {
                kafkaStreams.close();
            }
        }
    }

    /**
     * Maps each order record to a constant marker value: "123" for keys starting
     * with "member_a", otherwise "abc". Keys pass through unchanged.
     */
    static class MyTransformer_1 implements Transformer<String, String, KeyValue<String, String>> {
        private ProcessorContext context;

        @Override
        public void init(ProcessorContext context) {
            this.context = context;
        }

        @Override
        public KeyValue<String, String> transform(String key, String value) {
            // Kafka records may carry null keys; route them to the default branch
            // instead of throwing NPE and killing the stream thread.
            if (key != null && key.startsWith("member_a")) {
                return new KeyValue<>(key, "123");
            }
            return new KeyValue<>(key, "abc");
        }

        @Override
        public void close() {
            // No resources to release.
        }
    }

    /**
     * Maps each order record to a constant marker value: "456" for keys starting
     * with "member_a", otherwise "def". Keys pass through unchanged.
     */
    static class MyTransformer_2 implements Transformer<String, String, KeyValue<String, String>> {
        private ProcessorContext context;

        @Override
        public void init(ProcessorContext context) {
            this.context = context;
        }

        @Override
        public KeyValue<String, String> transform(String key, String value) {
            // Null-safe key check — see MyTransformer_1.
            if (key != null && key.startsWith("member_a")) {
                return new KeyValue<>(key, "456");
            }
            return new KeyValue<>(key, "def");
        }

        @Override
        public void close() {
            // No resources to release.
        }
    }

    /**
     * Closes the streams instance if it was started and is not already stopped.
     * Safe to call before {@link #startStreamConsume()}: a null instance is a no-op
     * (the previous version dereferenced the field unconditionally and could NPE).
     */
    public void stopStreamConsume() {
        final KafkaStreams streams = kafkaStreams;
        if (streams != null && NOT_RUNNING != streams.state()) {
            streams.close();
            System.out.println("stop kafka stream success...");
        }
    }
}