package com.sillyhat.studylibrary;

import org.apache.commons.codec.StringEncoder;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.TimeUnit;




public class kafkaProducer extends Thread{

    // NOTE(review): class name violates Java UpperCamelCase convention
    // (ideally KafkaProducerThread; plain KafkaProducer would collide with
    // the imported client class). Kept as-is so existing callers still work.

    /** Kafka topic this thread publishes to. */
    private final String topic;

    /**
     * Creates a producer thread bound to the given topic.
     *
     * @param topic target Kafka topic name (must already exist on the cluster)
     */
    public kafkaProducer(String topic){
        super();
        this.topic = topic;
    }

    /**
     * Publishes a monotonically numbered message to the topic once per
     * second until the thread is interrupted.
     */
    @Override
    public void run() {
        // try-with-resources flushes buffered records and releases the
        // producer's network resources when the loop exits.
        try (Producer<String, String> producer = createProducer()) {
            int i = 0;
            while (!Thread.currentThread().isInterrupted()) {
                String data = "message: " + i++;
                System.out.println(data);
                // BUG FIX: the original sent an empty string ("") while
                // printing `data`; send the actual payload instead.
                producer.send(new ProducerRecord<String, String>(topic, data));
                try {
                    TimeUnit.SECONDS.sleep(1);
                } catch (InterruptedException e) {
                    // Restore the interrupt flag and let the loop condition
                    // terminate the thread instead of swallowing the
                    // interruption and spinning forever.
                    Thread.currentThread().interrupt();
                }
            }
        }
    }

    /**
     * Builds a {@link KafkaProducer} configured for the cluster.
     *
     * <p>FIX: the original used legacy 0.8 Scala-producer keys
     * ({@code zookeeper.connect}, {@code serializer.class},
     * {@code metadata.broker.list}), which the new-client
     * {@link KafkaProducer} does not recognize — it fails at construction
     * without {@code bootstrap.servers} and key/value serializers. Replaced
     * with the {@link ProducerConfig} keys the new client requires, reusing
     * the original broker addresses.
     *
     * @return a producer of String keys and values
     */
    private Producer<String, String> createProducer() {
        Map<String, Object> properties = new HashMap<String, Object>();
        // Broker addresses (formerly passed as "metadata.broker.list").
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,
                "192.168.86.128:19001,192.168.86.129:19001,192.168.86.131:19001");
        // Serializer class names passed as strings so no extra import of
        // org.apache.kafka.common.serialization.StringSerializer is needed.
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringSerializer");
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringSerializer");
        return new KafkaProducer<String, String>(properties);
    }

    /**
     * Starts a producer thread against a topic pre-created on the cluster.
     */
    public static void main(String[] args) {
        new kafkaProducer("log_collect_topic").start();
    }
}
