package com.example;

import com.example.constant.KafkaTopic;
import com.example.entity.Order;
import com.example.entity.OrderDetail;
import com.example.entity.User;
import io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig;
import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KTable;
import org.apache.kafka.streams.kstream.KeyValueMapper;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.state.KeyValueStore;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * Kafka Streams example that enriches an order stream with user data.
 *
 * <p>Topology: reads {@code Order} events from the order topic, re-keys them by
 * {@code userId}, joins against a {@code KTable} of {@code User} records (backed by a
 * local state store), and writes the resulting {@code OrderDetail} records to the
 * order-detail topic. Key/value serialization uses String keys and Confluent
 * Avro ({@code SpecificAvroSerde}) values via the Schema Registry.
 */
public class KafkaStreamJoinExample {

    public static void main(String[] args) {
        // Single source of truth for the Schema Registry endpoint — used both in
        // the global StreamsConfig and in the manually configured User serde.
        final String schemaRegistryUrl = "http://192.168.8.80:8081";

        Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "apppp");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.8.80:9092,192.168.8.80:9093,192.168.8.80:9094");
        props.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, SpecificAvroSerde.class.getName());

        StreamsBuilder builder = new StreamsBuilder();

        // A manually instantiated serde must be configured explicitly; the
        // StreamsConfig registry URL only applies to default serdes.
        Map<String, String> serdeConfig = new HashMap<>();
        serdeConfig.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);

        Serde<User> userSerde = new SpecificAvroSerde<>();
        userSerde.configure(serdeConfig, false); // false = value serde, not key serde

        // Build the user KTable, materialized into a local state store named
        // after the topic so it can be queried / reused.
        KTable<String, User> kTableFromStore = builder.table(
                KafkaTopic.userTopic,
                Materialized.<String, User, KeyValueStore<Bytes, byte[]>>as(KafkaTopic.userTopic)
                        .withKeySerde(Serdes.String())
                        .withValueSerde(userSerde)
        );

        // Debug output of every user-table update.
        kTableFromStore.toStream().foreach((k, v) -> System.out.println(v));

        KStream<String, Order> orderKStream = builder.stream(KafkaTopic.orderTopic);

        // Re-key the order stream by userId so it can be joined with the user
        // KTable (stream-table joins match on key).
        orderKStream = orderKStream.map(
                (KeyValueMapper<String, Order, KeyValue<String, Order>>) (key, order) ->
                        new KeyValue<>(String.valueOf(order.getUserId()), order));

        // Inner join: emit an OrderDetail for each order whose userId has a
        // matching entry in the user table.
        KStream<String, OrderDetail> join = orderKStream.join(kTableFromStore, (order, user) -> {
            OrderDetail orderDetail = new OrderDetail();
            orderDetail.setOrderId(order.getId());
            orderDetail.setUserId(user.getId());
            orderDetail.setPrice(order.getPrice());
            orderDetail.setAge(user.getAge());
            orderDetail.setName(user.getName());
            System.out.println(orderDetail);
            return orderDetail;
        });

        join.to(KafkaTopic.orderDetail);

        KafkaStreams streams = new KafkaStreams(builder.build(), props);
        // The handler must return a non-null StreamThreadExceptionResponse;
        // returning null (as the original did) causes an NPE inside the
        // Streams runtime on top of the actual failure. Shut the client down
        // cleanly after logging the error.
        streams.setUncaughtExceptionHandler(throwable -> {
            throwable.printStackTrace();
            return StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse.SHUTDOWN_CLIENT;
        });
        streams.start();

        // Close the Streams client (flush state, commit offsets) on JVM exit.
        Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    }
}