package com.JadePenG.Storm;

import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.StormSubmitter;
import org.apache.storm.generated.AlreadyAliveException;
import org.apache.storm.generated.AuthorizationException;
import org.apache.storm.generated.InvalidTopologyException;
import org.apache.storm.kafka.bolt.KafkaBolt;
import org.apache.storm.kafka.spout.KafkaSpout;
import org.apache.storm.kafka.spout.KafkaSpoutConfig;
import org.apache.storm.topology.TopologyBuilder;

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/*
 * In this pipeline the Spout acts as a Kafka consumer (reads from a topic)
 * and the final Bolt acts as a Kafka producer (writes to a topic).
 * */
/**
 * Word-count topology: a KafkaSpout reads raw lines from the Kafka topic
 * "logs", a SplitBolt tokenizes them, a WordCountBolt counts the words,
 * and a KafkaBolt writes the counts back to the Kafka topic "keywords".
 *
 * Usage: run with a topology name argument to submit to a remote cluster;
 * run with no arguments to execute in an in-process LocalCluster.
 */
public class WCTopologyMain {
    public static void main(String[] args) throws InvalidTopologyException, AuthorizationException, AlreadyAliveException {
        // 1. Build the topology.
        TopologyBuilder topologyBuilder = new TopologyBuilder();

        // KafkaSpout consumes topic "logs" from broker node01:9092.
        KafkaSpoutConfig.Builder<String, String> builder = KafkaSpoutConfig.builder("node01:9092", "logs");
        // BUGFIX: the consumer group must be set BEFORE build(); in the
        // original code setGroupId() was called after build(), so the group
        // id never took effect on the already-built config.
        builder.setGroupId("hello_storm");
        KafkaSpoutConfig<String, String> kafkaSpoutConfig = builder.build();
        KafkaSpout<String, String> kafkaSpout = new KafkaSpout<>(kafkaSpoutConfig);

        // 1.1 The spout feeds log lines into the topology.
        topologyBuilder.setSpout("kafkaSpout", kafkaSpout);
        // 1.2 Split each line into individual words.
        topologyBuilder.setBolt("splitBolt", new SplitBolt()).shuffleGrouping("kafkaSpout");
        // 1.3 Count word occurrences. (Component id typo "wordcpuntBolt" fixed;
        // both the declaration and the downstream grouping below were updated.)
        topologyBuilder.setBolt("wordcountBolt", new WordCountBolt()).shuffleGrouping("splitBolt");

        // 1.4 KafkaBolt produces the results to topic "keywords".
        Properties props = new Properties();
        props.put("bootstrap.servers", "node01:9092");
        props.put("topic", "keywords");
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        KafkaBolt<String, String> kafkaBolt = new KafkaBolt<String, String>().withProducerProperties(props);
        topologyBuilder.setBolt("kafkaBolt", kafkaBolt).shuffleGrouping("wordcountBolt");

        // 2. Topology-level config so the cluster knows the output broker/topic.
        Config config = new Config();
        Map<String, String> map = new HashMap<>();
        // Kafka broker address (legacy and current property names).
        map.put("metadata.broker.list", "node01:9092");
        map.put("bootstrap.servers", "node01:9092");
        // Producer properties consumed by KafkaBolt.
        config.put("kafka.broker.properties", map);
        // Output topic for KafkaBolt.
        config.put("topic", "keywords");

        // BUGFIX: println(args) only printed the array's object reference;
        // Arrays.toString shows the actual argument values (and handles null).
        System.out.println(Arrays.toString(args));
        if (args != null && args.length > 0) {
            // A topology name was supplied: submit to the remote cluster.
            // Topology names must be unique within a cluster.
            System.out.println("集群运行");
            StormSubmitter.submitTopology(args[0], config, topologyBuilder.createTopology());

        } else {
            // No arguments: run inside an embedded local cluster for testing.
            System.out.println("本地运行");
            LocalCluster cluster = new LocalCluster();
            cluster.submitTopology("wordcount", config, topologyBuilder.createTopology());
        }
    }
}












