package com.example.test.stream1;

import com.example.constant.KafkaTopic;
import com.example.entity.stream1.*;
import io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig;
import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde;
import org.apache.avro.specific.SpecificRecord;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KTable;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.state.KeyValueStore;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * Demo Kafka Streams topology that enriches a {@code Source1} stream by joining it
 * against ten Avro-backed KTables (A1..J1) keyed by the same String key, folding all
 * fields into a single {@code All1} record which is printed and forwarded to
 * {@link KafkaTopic#ALLTopic1}.
 *
 * <p>NOTE(review): each join step mutates and returns the same {@code All1} instance;
 * this is fine for a linear inner-join chain but would be unsafe if the value were
 * shared across branches.
 */
public class KafkaStreamAllJoinExample {

    /** Broker list — kept in one place so the cluster address is changed once. */
    private static final String BOOTSTRAP_SERVERS =
            "192.168.8.80:9092,192.168.8.80:9093,192.168.8.80:9094";

    /** Schema Registry endpoint, shared by the streams config and every value serde. */
    private static final String SCHEMA_REGISTRY_URL = "http://192.168.8.80:8081";

    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "stream-demo1");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVERS);
        props.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, SCHEMA_REGISTRY_URL);
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, SpecificAvroSerde.class.getName());
        props.put(StreamsConfig.ROCKSDB_CONFIG_SETTER_CLASS_CONFIG, CustomRocksDBConfigSetter.class.getName());

        StreamsBuilder builder = new StreamsBuilder();
        // One materialized KTable per enrichment topic; each state store is named after its topic.
        KTable<String, A1> kATableFromStore = getStringKTable(builder, A1.class, KafkaTopic.ATopic1);
        KTable<String, B1> kBTableFromStore = getStringKTable(builder, B1.class, KafkaTopic.BTopic1);
        KTable<String, C1> kCTableFromStore = getStringKTable(builder, C1.class, KafkaTopic.CTopic1);
        KTable<String, D1> kDTableFromStore = getStringKTable(builder, D1.class, KafkaTopic.DTopic1);
        KTable<String, E1> kETableFromStore = getStringKTable(builder, E1.class, KafkaTopic.ETopic1);
        KTable<String, F1> kFTableFromStore = getStringKTable(builder, F1.class, KafkaTopic.FTopic1);
        KTable<String, G1> kGTableFromStore = getStringKTable(builder, G1.class, KafkaTopic.GTopic1);
        KTable<String, H1> kHTableFromStore = getStringKTable(builder, H1.class, KafkaTopic.HTopic1);
        KTable<String, I1> kITableFromStore = getStringKTable(builder, I1.class, KafkaTopic.ITopic1);
        KTable<String, J1> kJTableFromStore = getStringKTable(builder, J1.class, KafkaTopic.JTopic1);

        KStream<String, Source1> orderKStream = builder.stream(KafkaTopic.sourceId1);

        // Inner-join the source stream against each table in turn, accumulating every
        // table's fields into one All1 record. An inner join means a record is emitted
        // only when ALL ten tables contain the key.
        KStream<String, All1> join = orderKStream.join(kATableFromStore, (source, a) -> {
            All1 all = new All1();
            all.setId(source.getId());
            all.setA1(a.getA1());
            all.setA2(a.getA2());
            return all;
        }).join(kBTableFromStore, (all, b) -> {
            all.setB1(b.getB1());
            all.setB2(b.getB2());
            return all;
        }).join(kCTableFromStore, (all, c) -> {
            all.setC1(c.getC1());
            all.setC2(c.getC2());
            return all;
        }).join(kDTableFromStore, (all, d) -> {
            all.setD1(d.getD1());
            all.setD2(d.getD2());
            return all;
        }).join(kETableFromStore, (all, e) -> {
            all.setE1(e.getE1());
            all.setE2(e.getE2());
            return all;
        }).join(kFTableFromStore, (all, f) -> {
            all.setF1(f.getF1());
            all.setF2(f.getF2());
            return all;
        }).join(kGTableFromStore, (all, g) -> {
            all.setG1(g.getG1());
            all.setG2(g.getG2());
            return all;
        }).join(kHTableFromStore, (all, h) -> {
            all.setH1(h.getH1());
            all.setH2(h.getH2());
            return all;
        }).join(kITableFromStore, (all, i) -> {
            all.setI1(i.getI1());
            all.setI2(i.getI2());
            return all;
        }).join(kJTableFromStore, (all, j) -> {
            all.setJ1(j.getJ1());
            all.setJ2(j.getJ2());
            return all;
        });

        join.foreach((k, v) -> System.out.println("stream1:  " + v));
        join.to(KafkaTopic.ALLTopic1);

        KafkaStreams streams = new KafkaStreams(builder.build(), props);
        // BUG FIX: the previous handler returned null, which Kafka Streams dereferences
        // when deciding how to react to the failure -> NullPointerException inside the
        // runtime on any uncaught exception. Log and shut down the whole client instead
        // (SHUTDOWN_CLIENT is the library's default response).
        streams.setUncaughtExceptionHandler(throwable -> {
            throwable.printStackTrace();
            return StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse.SHUTDOWN_CLIENT;
        });
        streams.start();
        // Close cleanly on JVM shutdown so state stores are flushed and the group rebalances.
        Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    }

    /**
     * Materializes {@code topic} as a KTable backed by a state store of the same name,
     * with a String key and a Schema-Registry-aware Avro value serde.
     *
     * @param builder topology builder to register the table on
     * @param tClass  value record class; currently unused at runtime but kept so call
     *                sites document (and the compiler infers) the table's value type
     * @param topic   source topic, also used as the state-store name
     * @return the materialized KTable
     */
    private static <T extends SpecificRecord> KTable<String, T> getStringKTable(
            StreamsBuilder builder, Class<T> tClass, String topic) {
        // The serde must know the registry URL; "false" = configure as a value serde.
        Map<String, String> serdeConfig =
                Map.of(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, SCHEMA_REGISTRY_URL);

        Serde<T> tSerde = new SpecificAvroSerde<>();
        tSerde.configure(serdeConfig, false);

        // Build the KTable from the topic, materialized into a named key-value store.
        return builder.table(
                topic,
                Materialized.<String, T, KeyValueStore<Bytes, byte[]>>as(topic)
                        .withKeySerde(Serdes.String())
                        .withValueSerde(tSerde)
        );
    }
}