package com.neusoft.kfk;

import org.apache.kafka.clients.producer.KafkaProducer;

import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.internals.KafkaProducerMetrics;
import org.apache.kafka.common.protocol.types.Field;
import org.apache.kafka.common.serialization.StringSerializer;

/**
 * Simple Kafka producer demo: emits one CSV-style message per second
 * ("timestamp,i,i,i") to the {@code flink} topic for up to 24 hours.
 */
public class MyKfkProducer {

    /** Number of messages to produce: one per second for 24 hours. */
    private static final int MESSAGE_COUNT = 86_400;

    /**
     * Entry point. Configures a String/String producer against a local
     * 3-broker cluster and streams {@link #MESSAGE_COUNT} records.
     *
     * @param args unused
     */
    public static void main(String[] args) {

        Properties prop = new Properties();
        // Kafka broker addresses (local three-node cluster).
        prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9091,127.0.0.1:9092,127.0.0.1:9093");
        // Serialize both key and value as plain strings.
        prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        // Target topic.
        String topic = "flink";

        // try-with-resources guarantees the producer is closed (and its buffered
        // records flushed) even if send()/sleep() throws — the original leaked the
        // producer on any exception before close().
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(prop)) {
            for (int i = 0; i < MESSAGE_COUNT; i++) {
                // Produce one record: "epochMillis,i,i,i".
                String msg = System.currentTimeMillis() + "," + i + "," + i + "," + i;
                System.out.println(msg);
                producer.send(new ProducerRecord<>(topic, msg));
                // Throttle to roughly one message per second. No per-record flush():
                // it defeats batching, and close() flushes anything still buffered.
                Thread.sleep(1000);
            }
        } catch (InterruptedException e) {
            // Restore the interrupt flag so the JVM/shutdown hooks can observe it,
            // then exit; the try-with-resources still closes the producer.
            Thread.currentThread().interrupt();
        }

    }

}
