package kafka;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.kafka.bolt.KafkaBolt;
import org.apache.storm.kafka.bolt.mapper.FieldNameBasedTupleToKafkaMapper;
import org.apache.storm.kafka.bolt.selector.DefaultTopicSelector;
import org.apache.storm.topology.TopologyBuilder;

import java.util.Properties;

/**
 * Local Storm topology that reads tuples from {@code WordSpout} and publishes
 * them to the Kafka topic {@code storm-kafka-topic} via a {@link KafkaBolt}.
 *
 * <p>Runs on a {@link LocalCluster} for a fixed demo window, then kills the
 * topology and shuts the cluster down so the JVM can exit cleanly.
 */
public class WriteToKafkaTopology {

    private static final String TOPOLOGY_NAME = "storm-kafka-topology";
    private static final String KAFKA_TOPIC = "storm-kafka-topic";
    private static final String SPOUT_ID = "wordSpout";
    private static final String BOLT_ID = "kafkaBolt";
    /** How long the local demo topology is allowed to run before shutdown. */
    private static final long RUN_MILLIS = 60_000L;

    public static void main(String[] args) throws Exception {
        // NOTE(review): the default FieldNameBasedTupleToKafkaMapper expects the
        // spout to emit fields named "key" and "message" — confirm WordSpout does.
        KafkaBolt<String, String> kafkaBolt = new KafkaBolt<String, String>()
                .withProducerProperties(producerProperties())
                .withTopicSelector(new DefaultTopicSelector(KAFKA_TOPIC))
                .withTupleToKafkaMapper(new FieldNameBasedTupleToKafkaMapper<>());

        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout(SPOUT_ID, new WordSpout());
        builder.setBolt(BOLT_ID, kafkaBolt).shuffleGrouping(SPOUT_ID);

        Config config = new Config();
        LocalCluster localCluster = new LocalCluster();
        try {
            localCluster.submitTopology(TOPOLOGY_NAME, config, builder.createTopology());
            // Let the local topology run for the demo window; without this the
            // original code left the cluster alive forever with no shutdown path.
            Thread.sleep(RUN_MILLIS);
        } finally {
            // Always tear down the local cluster so the process can terminate.
            localCluster.killTopology(TOPOLOGY_NAME);
            localCluster.shutdown();
        }
    }

    /** Builds the Kafka producer configuration used by the bolt. */
    private static Properties producerProperties() {
        Properties properties = new Properties();
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "liu:9092");
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return properties;
    }
}
