package com.bigdata.hbasedemo.producer;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.ListTopicsResult;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import java.util.*;
import java.util.concurrent.ExecutionException;

public class KafkaProducerExample {
    private static final Logger logger = LogManager.getLogger(KafkaProducerExample.class);

    /** Delay in milliseconds between successive example messages. */
    private static final int EXAMPLE_PRODUCER_INTERVAL = 200;

    /**
     * Entry point: ensures the demo topic exists, then publishes one random
     * {@link DeviceData} payload to it every {@value #EXAMPLE_PRODUCER_INTERVAL} ms
     * until the thread is interrupted or a send fails.
     */
    public static void main(final String... args) {
        // Create the topic up front so the first send does not race topic creation.
        try {
            createTopic();
        } catch (ExecutionException e) {
            logger.error("An error occurred.", e);
        } catch (InterruptedException e) {
            // Restore the interrupt status and stop: an interrupted setup
            // should not silently fall through into the produce loop.
            Thread.currentThread().interrupt();
            logger.error("Interrupted while creating topic.", e);
            return;
        }
        Random random = new Random();

        // try-with-resources closes the producer on every exit path;
        // Producer.close() also flushes buffered records, so no explicit flush() is needed.
        try (Producer<String, String> producer = createProducer()) {
            while (true) {
                String uuid = UUID.randomUUID().toString();

                DeviceData data = new DeviceData();
                data.setTimestamp(String.valueOf(System.currentTimeMillis()));
                // Uniform random temperature in [-10, 50).
                data.setTemperature(String.valueOf(-10 + ((50 - (-10)) * random.nextDouble())));
                data.setOperationData(String.valueOf(random.nextInt(10)));
                String input = data.toString();

                ProducerRecord<String, String> record =
                        new ProducerRecord<>(Commons.EG_KAFKA_TOPIC, uuid, input);
                // Synchronous send: block until the broker acknowledges the record.
                RecordMetadata metadata = producer.send(record).get();

                logger.info("Sent ({}, {}) to topic {} @ {}.",
                        uuid, input, Commons.EG_KAFKA_TOPIC, metadata.timestamp());

                Thread.sleep(EXAMPLE_PRODUCER_INTERVAL);
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore interrupt status for callers
            logger.error("Interrupted while producing.", e);
        } catch (ExecutionException e) {
            logger.error("An error occurred.", e);
        }
    }

    /**
     * Builds a string-keyed, string-valued Kafka producer pointed at the demo
     * bootstrap server.
     *
     * @return a new, caller-owned {@link Producer}; the caller must close it
     */
    private static Producer<String, String> createProducer() {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, Commons.EG_KAFKA_SERVER);
        props.put(ProducerConfig.CLIENT_ID_CONFIG, "KafkaProducerExample");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        return new KafkaProducer<>(props);
    }

    /**
     * Creates the demo topic if it does not already exist, blocking until the
     * broker confirms the creation.
     *
     * @throws ExecutionException   if listing or creating topics fails on the broker
     * @throws InterruptedException if the calling thread is interrupted while waiting
     */
    private static void createTopic() throws ExecutionException, InterruptedException {
        // Number of partitions.
        int partitions = 1;
        // Replication factor.
        short replication = 1;
        Properties properties = new Properties();
        properties.setProperty(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG,
                Commons.EG_KAFKA_SERVER);
        // try-with-resources guarantees the admin client is closed even when
        // listTopics() or createTopics() throws (the original leaked it on failure).
        try (AdminClient admin = AdminClient.create(properties)) {
            Set<String> existingTopics = admin.listTopics().names().get();
            logger.info("Existing topics: {}", existingTopics);
            if (!existingTopics.contains(Commons.EG_KAFKA_TOPIC)) {
                NewTopic topic = new NewTopic(Commons.EG_KAFKA_TOPIC, partitions, replication);
                // Wait for the broker to acknowledge creation instead of
                // sleeping a fixed 500 ms and hoping the request completed.
                admin.createTopics(Collections.singletonList(topic)).all().get();
                logger.info("Topic {} created.", Commons.EG_KAFKA_TOPIC);
            } else {
                logger.info("Topic {} already exists.", Commons.EG_KAFKA_TOPIC);
            }
        }
    }
}


