package org.budo.dubbo.protocol.async.repository.activemq;

import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.serialization.BytesDeserializer;
import org.apache.kafka.common.serialization.BytesSerializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.common.utils.Bytes;
import org.budo.support.lang.util.MapUtil;

/**
 * Manual smoke test: publishes a single message to a Kafka topic and prints
 * the broker acknowledgement.
 *
 * <p>Run directly; sends one record to {@code TOPIC_WW_CHAT_MSG} on the broker
 * configured below and prints the resulting {@link RecordMetadata} to stderr.
 */
public class Kafka {
    public static void main(String[] args) throws InterruptedException, ExecutionException {
        Map<String, Object> configs = MapUtil.stringObjectMap();

        // NOTE(review): hard-coded broker address — consider externalizing.
        configs.put("bootstrap.servers", "192.168.4.253:9092");

        // Producer-side serializers only. The original also set consumer-side
        // properties here ("key.deserializer", "value.deserializer",
        // "enable.auto.commit" / no auto-commit); a producer ignores those and
        // logs them as unused-config warnings, so they were removed.
        configs.put("key.serializer", StringSerializer.class.getName());
        configs.put("value.serializer", BytesSerializer.class.getName());

        // Explicit charset: the no-arg getBytes() uses the platform default
        // encoding, which is not portable across environments.
        byte[] requestBody = "xxxxxxxxxxxxxxxxxxxxx".getBytes(StandardCharsets.UTF_8);
        Bytes bytes = Bytes.wrap(requestBody);

        // try-with-resources guarantees the producer (and its background
        // network threads) is closed even if send()/get() fails.
        try (KafkaProducer<String, Bytes> kafkaProducer = new KafkaProducer<String, Bytes>(configs)) {
            ProducerRecord<String, Bytes> producerRecord =
                    new ProducerRecord<String, Bytes>("TOPIC_WW_CHAT_MSG", bytes);
            Future<RecordMetadata> future = kafkaProducer.send(producerRecord);

            // Block for the broker acknowledgement and print the metadata;
            // printing the Future itself (as before) only showed an object
            // reference, never the send result.
            System.err.println(future.get());
        }
    }
}