package sgt.test;

import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.storm.utils.Utils;

import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Properties;
import java.util.Random;
import java.util.UUID;

/**
 * Reads {@code src/main/resources/lipstick.csv} line by line (skipping the
 * header row) and publishes each data row to the Kafka topic {@link #topic},
 * keyed by a random UUID.
 */
public class Producer {
    /** Destination Kafka topic for every CSV row. */
    public static String topic = "mytopic";
    /**
     * Kept public/static for backward compatibility with any code that
     * referenced this field; set to the active reader while main() runs.
     */
    public static BufferedReader bufferedReader = null;

    public static void main(String[] args) {
        Properties p = new Properties();
        p.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "niit-master:9092");
        p.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        p.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);

        // try-with-resources closes both the producer and the reader even on
        // failure. The original leaked the reader and only closed the producer.
        try (KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(p);
             // Explicit UTF-8: FileReader used the platform default charset
             // (pre-Java 18), which garbles non-ASCII CSV content.
             BufferedReader reader = Files.newBufferedReader(
                     Paths.get("src/main/resources/lipstick.csv"),
                     StandardCharsets.UTF_8)) {
            bufferedReader = reader; // preserve the old public-field contract
            reader.readLine();       // skip the CSV header row
            String str;
            while ((str = reader.readLine()) != null) {
                ProducerRecord<String, String> record = new ProducerRecord<>(topic,
                        UUID.randomUUID().toString(),
                        str);

                kafkaProducer.send(record, new Callback() {
                    @Override
                    public void onCompletion(RecordMetadata metadata, Exception exception) {
                        // On a failed send metadata is null — the original
                        // would have thrown an NPE here instead of reporting.
                        if (exception != null) {
                            System.err.println("send failed: " + exception);
                        } else {
                            System.out.println("offset=" + metadata.offset() +
                                    "partition" + metadata.partition());
                        }
                    }
                });
                System.out.println("消息发送成功:" + str);
            }
        } catch (Exception e) {
            // The original swallowed every exception silently; at minimum
            // surface the failure so a missing file or broker is diagnosable.
            e.printStackTrace();
        }
    }
}
