package lowlevel;

import org.apache.kafka.common.serialization.LongSerializer;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.processor.Processor;
import org.apache.kafka.streams.processor.ProcessorSupplier;

import java.util.Properties;

/**
 * Word-count example built with the Kafka Streams low-level Processor API.
 *
 * @author RenPu
 * @since 2020/3/27
 * @version 1.0
 */
public class WortCountWithProcessorAPI {

    /**
     * Builds and runs a word-count topology using the low-level Processor API:
     * source topic {@code t7} -> {@code WordCountProcesserTwo} -> sink topic {@code t8}.
     */
    public static void main(String[] args) {

        // 1. Streams configuration.
        Properties properties = new Properties();

        // Kafka cluster bootstrap address.
        // TODO: fill in a real broker list, e.g. "host1:9092,host2:9092" — empty is invalid.
        properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "");

        // Default key serde (serializer + deserializer).
        // BUG FIX: the original set DEFAULT_WINDOWED_KEY_SERDE_INNER_CLASS twice, so the
        // key serde was silently overwritten by the value serde and neither default
        // serde was actually configured. This app is not windowed; use the plain
        // default key/value serde configs instead.
        properties.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.Integer().getClass());

        // Default value serde (serializer + deserializer).
        properties.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

        // Application id — also used as the consumer-group id when polling.
        // TODO: an empty application.id is rejected by StreamsConfig at startup; set a real id.
        properties.put(StreamsConfig.APPLICATION_ID_CONFIG, "");

        // Number of stream-processing threads.
        properties.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, 2);

        // 2. Hand-wired topology.
        Topology topology = new Topology();

        // Source node "s1": subscribes to topic t7 (the data origin).
        topology.addSource("s1", "t7");

        // Processor node "p1": the per-record processing unit; its parent is source "s1",
        // so it consumes the records read from topic t7.
        topology.addProcessor("p1", WordCountProcesserTwo::new, "s1");

        // Sink node "k1": writes p1's output to topic t8 as (String word, Long count).
        topology.addSink("k1", "t8", new StringSerializer(), new LongSerializer(), "p1");

        // 3. Create and start the streams runtime.
        KafkaStreams streams = new KafkaStreams(topology, properties);

        // Close cleanly on JVM shutdown so state stores are flushed and the
        // consumer group leaves gracefully (the original never closed streams).
        Runtime.getRuntime().addShutdownHook(new Thread(streams::close));

        streams.start();
    }
}
