package com.itzmn.tmall.kafkastream;

import java.util.Arrays;
import java.util.Properties;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.kstream.*;
import org.apache.kafka.streams.processor.ProcessorContext;
import org.apache.kafka.streams.processor.TopologyBuilder;
import org.apache.kafka.streams.state.KeyValueStore;

/*
 * @Author: 张梦楠
 * @Date: 2019/7/7 16:52
 * 简书：https://www.jianshu.com/u/d611be10d1a6
 * 码云：https://gitee.com/zhangqiye
 * @Description:  kafkastream 处理类入口
 */
public class KafkaStreamApplication {


    /**
     * Entry point for the Kafka Streams processing application.
     * Builds a low-level topology that reads records from the {@code log} topic,
     * pipes them through {@code LogProcesser}, and writes the results to the
     * {@code process} topic, then starts the streams client.
     *
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {

        String from = "log";
        String to = "process";
        Properties properties = new Properties();

        // Unique application id (also used as the consumer group id and state dir name).
        properties.setProperty(StreamsConfig.APPLICATION_ID_CONFIG, "stream-tmall");
        // Kafka broker bootstrap addresses.
        properties.setProperty(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG,
                "192.168.33.3:9092,192.168.33.4:9092,192.168.33.5:9092");

        // Build the processing topology: source topic -> processor -> sink topic.
        Topology topology = new Topology();
        topology.addSource("SOURCE", from)
                // Method reference over lambda; the supplier must return a fresh
                // processor instance per task, which a no-arg constructor ref does.
                .addProcessor("PROCESS", LogProcesser::new, "SOURCE")
                .addSink("SINK", to, "PROCESS");

        // Create the Kafka Streams client from the topology and configuration.
        KafkaStreams streams = new KafkaStreams(topology, properties);

        // Close the client cleanly on JVM shutdown (Ctrl+C / SIGTERM); without this,
        // state stores are not flushed and the instance leaves the group uncleanly.
        Runtime.getRuntime().addShutdownHook(new Thread(streams::close, "streams-shutdown-hook"));

        // Start the stream processing threads (asynchronous; returns immediately).
        streams.start();
        System.out.println("kafkaStream is start!!!");


    }

}
