/**
 * @Author cheng
 * @Date 2022 11 06 21 49
 **/

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.Properties;
import java.util.Random;

public class Producer {

    /**
     * Continuously publishes simulated supermarket inventory records to the
     * {@code supermarket} Kafka topic: 5 tab-separated records every 5 seconds.
     *
     * <p>Record format: {@code productId \t category \t stockIn \t soldOut \t status}
     *
     * @param args unused
     */
    public static void main(String[] args) {

        // 1. Build the Kafka producer configuration.
        Properties properties = new Properties();
        // 2. Broker address and String serializers for both key and value.
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.121.129:9092");
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");

        // One Random instance for the whole run; no need to reseed per record.
        Random rd = new Random();

        // 3. Create the producer ONCE and close it automatically on exit
        //    (the original re-created and closed a producer on every loop
        //    iteration, which is wasteful and churns broker connections).
        try (KafkaProducer<String, String> kafkaProducer = new KafkaProducer<String, String>(properties)) {
            while (true) {
                System.out.println("开始发送数据==========================");
                // 4. Send a batch of 5 simulated records.
                for (int i = 0; i < 5; i++) {
                    // Category name; typo "procuct" fixed to "product".
                    String category = "product" + rd.nextInt(10);
                    // Product id in [1000000, 1000009].
                    int productId = 1000000 + rd.nextInt(10);
                    // Units stocked in, range [0, 999].
                    int stockIn = rd.nextInt(1000);
                    // Units sold, range [0, stockIn] inclusive.
                    // NOTE: the original called rd.nextInt(stockIn), which throws
                    // IllegalArgumentException whenever stockIn is 0; the +1 bound
                    // both fixes that crash and matches the documented range.
                    int soldOut = rd.nextInt(stockIn + 1);
                    // Shelf status: 'U' (up / listed) or 'D' (down / delisted).
                    char status = rd.nextBoolean() ? 'U' : 'D';

                    // Assemble the tab-separated payload.
                    String data = productId + "\t" + category + "\t" + stockIn + "\t" + soldOut + "\t" + status;
                    System.out.println("发送数据为: " + data);

                    try {
                        kafkaProducer.send(new ProducerRecord<String, String>("supermarket", data));
                    } catch (Exception e) {
                        // Best-effort demo producer: log the failure and keep going.
                        e.printStackTrace();
                    }
                }
                try {
                    Thread.sleep(5000); // Push a new batch every 5 seconds.
                } catch (InterruptedException e) {
                    // Restore the interrupt flag and stop producing instead of
                    // silently swallowing the interruption.
                    Thread.currentThread().interrupt();
                    break;
                }
            }
        }
        // 5. Producer is closed by try-with-resources.
    }
}
