package com.flink.source;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.*;
import java.util.concurrent.TimeUnit;


/**
 * Demo driver that periodically pushes web-event JSON strings (produced by
 * {@code WebDataProducer}) onto a local Kafka topic, one record every three seconds.
 */
public class SourceKafka {

    /** Kafka bootstrap broker list. */
    public static final String BROKER_LIST = "localhost:9092";
    /** Target Kafka topic for the web data. */
    public static final String TOPIC_WEB = "web";
    /** Serializer class for record keys (plain strings). */
    public static final String KEY_SERIALIZER = "org.apache.kafka.common.serialization.StringSerializer";
    /** Serializer class for record values (plain strings). */
    public static final String VALUE_SERIALIZER = "org.apache.kafka.common.serialization.StringSerializer";

    /**
     * Builds one web-data JSON payload and sends it to the {@link #TOPIC_WEB} topic.
     *
     * @param producer an open Kafka producer; ownership (and closing) stays with the caller
     * @throws Exception if payload generation or the send fails
     */
    public static void writeToKafka(KafkaProducer<String, String> producer) throws Exception {
        String webDataJson = WebDataProducer.webDataProducer();
        // No explicit partition or key: the two-arg form is equivalent to
        // (TOPIC_WEB, null, null, webDataJson) and lets the default partitioner decide.
        ProducerRecord<String, String> record = new ProducerRecord<>(TOPIC_WEB, webDataJson);

        producer.send(record);
        System.out.println("向kafka发送数据：" + webDataJson);
        // Flush so the record is actually on the wire before the next sleep cycle.
        producer.flush();
    }

    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", BROKER_LIST);
        props.put("key.serializer", KEY_SERIALIZER);
        props.put("value.serializer", VALUE_SERIALIZER);

        // try-with-resources guarantees the producer is closed (and its buffer
        // flushed) even if the loop exits via interruption or an error.
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            while (!Thread.currentThread().isInterrupted()) {
                try {
                    // Emit one record every three seconds.
                    TimeUnit.SECONDS.sleep(3);
                    writeToKafka(producer);
                } catch (InterruptedException ie) {
                    // Restore the interrupt status so the loop condition sees it
                    // and the thread can shut down cleanly.
                    Thread.currentThread().interrupt();
                } catch (Exception e) {
                    // Best-effort demo: log the failure and keep producing.
                    e.printStackTrace();
                }
            }
        }
    }
}
