package com.datastream;

import com.datastream.process.MyDataProcesser;

import init.InitConf;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.Topology;

import java.util.Properties;
import java.util.concurrent.CountDownLatch;

import static init.InitConf.KAFKA_BOOTSTRAP;
import static init.InitConf.STREAMID;

/**
 * Created by kzdatd on 2020/1/13.
 */
/**
 * Entry point for the data-stream application: wires a three-node Kafka Streams
 * topology (source topic -> {@link MyDataProcesser} -> sink topic) and runs it
 * until the JVM receives a shutdown signal.
 */
public class StreamData {

    public static void main(String[] args) {
        // Load external configuration first; init() populates the constants
        // (STREAMID, KAFKA_BOOTSTRAP, TOPIC_IN, TOPIC_OUT) read below.
        InitConf initConf = new InitConf();
        initConf.init();

        Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, STREAMID);
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_BOOTSTRAP);
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

        // source "input" -> processor -> sink "out".
        // NOTE(review): the node name "com/datastream/process" looks like an
        // accidental IDE path-rename of "process"; kept byte-identical because
        // changing topology node names can affect internally derived names.
        final Topology topology = new Topology();
        topology.addSource("input", InitConf.TOPIC_IN)
                .addProcessor("com/datastream/process", new MyDataProcesser(), "input")
                .addSink("out", InitConf.TOPIC_OUT, "com/datastream/process");

        final KafkaStreams streams = new KafkaStreams(topology, props);
        final CountDownLatch latch = new CountDownLatch(1);

        // Attach a shutdown hook so Ctrl-C / SIGTERM closes the streams client
        // cleanly, then releases main() below.
        Runtime.getRuntime().addShutdownHook(new Thread("streams-shutdown-hook") {
            @Override
            public void run() {
                System.out.println("logs sys data-stream shut down ");
                streams.close();
                latch.countDown();
            }
        });

        try {
            streams.start();
            // Block until the shutdown hook fires; previously main() simply
            // returned and the app relied on KafkaStreams' non-daemon threads
            // to keep the JVM alive.
            latch.await();
        } catch (Throwable e) {
            // Was silently swallowed before exiting — report the cause so a
            // startup failure (bad config, unreachable broker) is diagnosable.
            System.err.println("data-stream failed to start: " + e.getMessage());
            e.printStackTrace();
            System.exit(1);
        }
        System.exit(0);
    }
}
